mirror of https://github.com/hwchase17/langchain.git
synced 2025-07-17 10:13:29 +00:00
ollama[patch]: ruff fixes and rules (#31924)
* bump ruff deps
* add more thorough ruff rules
* fix said rules
This commit is contained in:
parent 4d9eefecab
commit 1f829aacf4
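Most of the diff below applies a few recurring mechanical fixes. The most common comes from the newly enabled EM (flake8-errmsg) rules, which flag string literals passed directly to an exception constructor; that is why `raise ValueError("...")` is repeatedly rewritten as an assignment to `msg` followed by the raise. A minimal sketch of the pattern (the function and model name are illustrative, not taken from the diff):

    def require_model(model_name: str) -> None:
        # Before (flagged as EM102): raise ValueError(f"Model `{model_name}` not found.")
        # After: bind the message to a variable, then raise it, so the traceback
        # shows a clean name instead of the message-construction expression.
        msg = f"Model `{model_name}` not found."
        raise ValueError(msg)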
@@ -41,13 +41,14 @@ lint_tests: PYTHON_FILES=tests
 lint_tests: MYPY_CACHE=.mypy_cache_test

 lint lint_diff lint_package lint_tests:
-	[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff $(PYTHON_FILES)
+	[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check $(PYTHON_FILES)
 	[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES) --diff
 	[ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && uv run --all-groups mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)

 format format_diff:
 	[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES)
-	[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff --fix $(PYTHON_FILES)
+	[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check --fix $(PYTHON_FILES)

 spell_check:
 	uv run --all-groups codespell --toml pyproject.toml
@@ -18,7 +18,7 @@ del metadata  # optional, avoids polluting the results of dir(__package__)

 __all__ = [
     "ChatOllama",
-    "OllamaLLM",
     "OllamaEmbeddings",
+    "OllamaLLM",
     "__version__",
 ]
@@ -20,18 +20,21 @@ def validate_model(client: Client, model_name: str) -> None:
         if not any(
             model_name == m or m.startswith(f"{model_name}:") for m in model_names
         ):
-            raise ValueError(
+            msg = (
                 f"Model `{model_name}` not found in Ollama. Please pull the "
                 f"model (using `ollama pull {model_name}`) or specify a valid "
                 f"model name. Available local models: {', '.join(model_names)}"
             )
+            raise ValueError(msg)
     except ConnectError as e:
-        raise ValueError(
+        msg = (
             "Connection to Ollama failed. Please make sure Ollama is running "
             f"and accessible at {client._client.base_url}. "
-        ) from e
+        )
+        raise ValueError(msg) from e
     except ResponseError as e:
-        raise ValueError(
+        msg = (
             "Received an error from the Ollama API. "
             "Please check your Ollama server logs."
-        ) from e
+        )
+        raise ValueError(msg) from e
@@ -1,5 +1,7 @@
 """Ollama chat models."""

+from __future__ import annotations
+
 import json
 from collections.abc import AsyncIterator, Iterator, Mapping, Sequence
 from operator import itemgetter
@@ -74,7 +76,9 @@ def _get_usage_metadata_from_generation_info(


 def _parse_json_string(
-    json_string: str, raw_tool_call: dict[str, Any], skip: bool
+    json_string: str,
+    raw_tool_call: dict[str, Any],
+    skip: bool,  # noqa: FBT001
 ) -> Any:
     """Attempt to parse a JSON string for tool calling.

@@ -148,16 +152,19 @@ def _get_tool_calls_from_response(
 ) -> list[ToolCall]:
     """Get tool calls from ollama response."""
     tool_calls = []
-    if "message" in response:
-        if raw_tool_calls := response["message"].get("tool_calls"):
-            for tc in raw_tool_calls:
-                tool_calls.append(
-                    tool_call(
-                        id=str(uuid4()),
-                        name=tc["function"]["name"],
-                        args=_parse_arguments_from_tool_call(tc) or {},
-                    )
-                )
+    if "message" in response and (
+        raw_tool_calls := response["message"].get("tool_calls")
+    ):
+        tool_calls.extend(
+            [
+                tool_call(
+                    id=str(uuid4()),
+                    name=tc["function"]["name"],
+                    args=_parse_arguments_from_tool_call(tc) or {},
+                )
+                for tc in raw_tool_calls
+            ]
+        )
     return tool_calls

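The rewrite above also replaces an explicit append loop with a single `extend` over a comprehension, presumably to satisfy ruff's PERF401 (manual list comprehension) check, while the walrus condition is folded into the `if` to avoid the nested branch. A standalone sketch of the list-building half, with placeholder data:

    raw_tool_calls = [{"name": "a"}, {"name": "b"}]

    # Before (flagged by PERF401): grow the list one append at a time.
    tool_calls = []
    for tc in raw_tool_calls:
        tool_calls.append(tc["name"])

    # After: build the values in one comprehension and extend once.
    tool_calls = []
    tool_calls.extend([tc["name"] for tc in raw_tool_calls])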
@@ -178,14 +185,12 @@ def _get_image_from_data_content_block(block: dict) -> str:
     if block["type"] == "image":
         if block["source_type"] == "base64":
             return block["data"]
-        else:
-            error_message = "Image data only supported through in-line base64 format."
-            raise ValueError(error_message)
-
-    else:
-        error_message = f"Blocks of type {block['type']} not supported."
-        raise ValueError(error_message)
+        error_message = "Image data only supported through in-line base64 format."
+        raise ValueError(error_message)
+
+    error_message = f"Blocks of type {block['type']} not supported."
+    raise ValueError(error_message)


 def _is_pydantic_class(obj: Any) -> bool:
     return isinstance(obj, type) and is_basemodel_subclass(obj)
@@ -459,7 +464,7 @@ class ChatOllama(BaseChatModel):
     """Base url the model is hosted under."""

     client_kwargs: Optional[dict] = {}
-    """Additional kwargs to pass to the httpx clients.
+    """Additional kwargs to pass to the httpx clients.
     These arguments are passed to both synchronous and async clients.
     Use sync_client_kwargs and async_client_kwargs to pass different arguments
     to synchronous and asynchronous clients.
@@ -496,7 +501,8 @@ class ChatOllama(BaseChatModel):
         ollama_messages = self._convert_messages_to_ollama_messages(messages)

         if self.stop is not None and stop is not None:
-            raise ValueError("`stop` found in both the input and default params.")
+            msg = "`stop` found in both the input and default params."
+            raise ValueError(msg)
         if self.stop is not None:
             stop = self.stop

@@ -584,7 +590,8 @@ class ChatOllama(BaseChatModel):
             role = "tool"
             tool_call_id = message.tool_call_id
         else:
-            raise ValueError("Received unsupported message type for Ollama.")
+            msg = "Received unsupported message type for Ollama."
+            raise ValueError(msg)

         content = ""
         images = []
@@ -608,10 +615,11 @@ class ChatOllama(BaseChatModel):
                     ):
                         image_url = temp_image_url["url"]
                     else:
-                        raise ValueError(
+                        msg = (
                             "Only string image_url or dict with string 'url' "
                             "inside content parts are supported."
                         )
+                        raise ValueError(msg)

                     image_url_components = image_url.split(",")
                     # Support data:image/jpeg;base64,<image> format
@@ -624,22 +632,24 @@ class ChatOllama(BaseChatModel):
                     image = _get_image_from_data_content_block(content_part)
                     images.append(image)
                 else:
-                    raise ValueError(
+                    msg = (
                         "Unsupported message content type. "
                         "Must either have type 'text' or type 'image_url' "
                         "with a string 'image_url' field."
                     )
-            # Should convert to ollama.Message once role includes tool, and tool_call_id is in Message # noqa: E501
-            msg: dict = {
+                    raise ValueError(msg)
+            # Should convert to ollama.Message once role includes tool,
+            # and tool_call_id is in Message
+            msg_: dict = {
                 "role": role,
                 "content": content,
                 "images": images,
             }
             if tool_calls:
-                msg["tool_calls"] = tool_calls
+                msg_["tool_calls"] = tool_calls
             if tool_call_id:
-                msg["tool_call_id"] = tool_call_id
-            ollama_messages.append(msg)
+                msg_["tool_call_id"] = tool_call_id
+            ollama_messages.append(msg_)

         return ollama_messages
@@ -677,7 +687,7 @@ class ChatOllama(BaseChatModel):
         messages: list[BaseMessage],
         stop: Optional[list[str]] = None,
         run_manager: Optional[CallbackManagerForLLMRun] = None,
-        verbose: bool = False,
+        verbose: bool = False,  # noqa: FBT001, FBT002
         **kwargs: Any,
     ) -> ChatGenerationChunk:
         final_chunk = None
@@ -693,7 +703,8 @@ class ChatOllama(BaseChatModel):
                 verbose=verbose,
             )
         if final_chunk is None:
-            raise ValueError("No data received from Ollama stream.")
+            msg = "No data received from Ollama stream."
+            raise ValueError(msg)

         return final_chunk

@@ -702,7 +713,7 @@ class ChatOllama(BaseChatModel):
         messages: list[BaseMessage],
         stop: Optional[list[str]] = None,
         run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
-        verbose: bool = False,
+        verbose: bool = False,  # noqa: FBT001, FBT002
         **kwargs: Any,
     ) -> ChatGenerationChunk:
         final_chunk = None
@@ -718,7 +729,8 @@ class ChatOllama(BaseChatModel):
                 verbose=verbose,
             )
         if final_chunk is None:
-            raise ValueError("No data received from Ollama stream.")
+            msg = "No data received from Ollama stream."
+            raise ValueError(msg)

         return final_chunk

@@ -908,7 +920,7 @@ class ChatOllama(BaseChatModel):
         self,
         tools: Sequence[Union[dict[str, Any], type, Callable, BaseTool]],
         *,
-        tool_choice: Optional[Union[dict, str, Literal["auto", "any"], bool]] = None,
+        tool_choice: Optional[Union[dict, str, Literal["auto", "any"], bool]] = None,  # noqa: PYI051
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
@@ -923,7 +935,7 @@ class ChatOllama(BaseChatModel):
                 is currently ignored as it is not supported by Ollama.**
             kwargs: Any additional parameters are passed directly to
                 ``self.bind(**kwargs)``.
-        """  # noqa: E501
+        """
         formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
         return super().bind(tools=formatted_tools, **kwargs)

@@ -1180,14 +1192,16 @@ class ChatOllama(BaseChatModel):
         """  # noqa: E501, D301
         _ = kwargs.pop("strict", None)
         if kwargs:
-            raise ValueError(f"Received unsupported arguments {kwargs}")
+            msg = f"Received unsupported arguments {kwargs}"
+            raise ValueError(msg)
         is_pydantic_schema = _is_pydantic_class(schema)
         if method == "function_calling":
             if schema is None:
-                raise ValueError(
+                msg = (
                     "schema must be specified when method is not 'json_mode'. "
                     "Received None."
                 )
+                raise ValueError(msg)
             formatted_tool = convert_to_openai_tool(schema)
             tool_name = formatted_tool["function"]["name"]
             llm = self.bind_tools(
@@ -1222,10 +1236,11 @@ class ChatOllama(BaseChatModel):
             )
         elif method == "json_schema":
             if schema is None:
-                raise ValueError(
+                msg = (
                     "schema must be specified when method is not 'json_mode'. "
                     "Received None."
                 )
+                raise ValueError(msg)
             if is_pydantic_schema:
                 schema = cast(TypeBaseModel, schema)
                 if issubclass(schema, BaseModelV1):
@@ -1259,10 +1274,11 @@ class ChatOllama(BaseChatModel):
             )
             output_parser = JsonOutputParser()
         else:
-            raise ValueError(
+            msg = (
                 f"Unrecognized method argument. Expected one of 'function_calling', "
                 f"'json_schema', or 'json_mode'. Received: '{method}'"
             )
+            raise ValueError(msg)

         if include_raw:
             parser_assign = RunnablePassthrough.assign(
@@ -1,5 +1,7 @@
 """Ollama embeddings models."""

+from __future__ import annotations
+
 from typing import Any, Optional

 from langchain_core.embeddings import Embeddings
@@ -132,7 +134,7 @@ class OllamaEmbeddings(BaseModel, Embeddings):
     """Base url the model is hosted under."""

     client_kwargs: Optional[dict] = {}
-    """Additional kwargs to pass to the httpx clients.
+    """Additional kwargs to pass to the httpx clients.
     These arguments are passed to both synchronous and async clients.
     Use sync_client_kwargs and async_client_kwargs to pass different arguments
     to synchronous and asynchronous clients.
@@ -271,14 +273,14 @@ class OllamaEmbeddings(BaseModel, Embeddings):
     def embed_documents(self, texts: list[str]) -> list[list[float]]:
         """Embed search docs."""
         if not self._client:
-            raise ValueError(
+            msg = (
                 "Ollama client is not initialized. "
                 "Please ensure Ollama is running and the model is loaded."
             )
-        embedded_docs = self._client.embed(
+            raise ValueError(msg)
+        return self._client.embed(
             self.model, texts, options=self._default_params, keep_alive=self.keep_alive
         )["embeddings"]
-        return embedded_docs

     def embed_query(self, text: str) -> list[float]:
         """Embed query text."""
@@ -287,16 +289,16 @@ class OllamaEmbeddings(BaseModel, Embeddings):
     async def aembed_documents(self, texts: list[str]) -> list[list[float]]:
         """Embed search docs."""
         if not self._async_client:
-            raise ValueError(
+            msg = (
                 "Ollama client is not initialized. "
                 "Please ensure Ollama is running and the model is loaded."
             )
-        embedded_docs = (
+            raise ValueError(msg)
+        return (
             await self._async_client.embed(
                 self.model, texts, keep_alive=self.keep_alive
             )
         )["embeddings"]
-        return embedded_docs

     async def aembed_query(self, text: str) -> list[float]:
         """Embed query text."""
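Both embedding methods above stop binding the result to `embedded_docs` and return the expression directly, the shape ruff's RET504 rule (unnecessary assignment before `return`) enforces. A minimal sketch with a stub client, since the real one needs a running Ollama server:

    class StubClient:
        def embed(self, texts: list[str]) -> dict:
            return {"embeddings": [[0.0] for _ in texts]}  # placeholder vectors

    client = StubClient()

    def embed_documents(texts: list[str]) -> list[list[float]]:
        # Before (RET504): embedded_docs = client.embed(texts)["embeddings"]
        #                  return embedded_docs
        # After: return the expression directly.
        return client.embed(texts)["embeddings"]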
@@ -1,5 +1,7 @@
 """Ollama large language models."""

+from __future__ import annotations
+
 from collections.abc import AsyncIterator, Iterator, Mapping
 from typing import (
     Any,
@@ -132,22 +134,22 @@ class OllamaLLM(BaseLLM):
     """Base url the model is hosted under."""

     client_kwargs: Optional[dict] = {}
-    """Additional kwargs to pass to the httpx clients.
+    """Additional kwargs to pass to the httpx clients.
     These arguments are passed to both synchronous and async clients.
     Use sync_client_kwargs and async_client_kwargs to pass different arguments
     to synchronous and asynchronous clients.
     """

     async_client_kwargs: Optional[dict] = {}
-    """Additional kwargs to merge with client_kwargs before passing to the HTTPX
+    """Additional kwargs to merge with client_kwargs before passing to the HTTPX
     AsyncClient.

     For a full list of the params, see the `HTTPX documentation <https://www.python-httpx.org/api/#asyncclient>`__.
     """

     sync_client_kwargs: Optional[dict] = {}
     """Additional kwargs to merge with client_kwargs before passing to the HTTPX Client.

     For a full list of the params, see the `HTTPX documentation <https://www.python-httpx.org/api/#client>`__.
     """

@@ -168,7 +170,8 @@ class OllamaLLM(BaseLLM):
         **kwargs: Any,
     ) -> dict[str, Any]:
         if self.stop is not None and stop is not None:
-            raise ValueError("`stop` found in both the input and default params.")
+            msg = "`stop` found in both the input and default params."
+            raise ValueError(msg)
         if self.stop is not None:
             stop = self.stop

@@ -193,7 +196,7 @@ class OllamaLLM(BaseLLM):
             },
         )

-        params = {
+        return {
             "prompt": prompt,
             "stream": kwargs.pop("stream", True),
             "model": kwargs.pop("model", self.model),
@@ -204,8 +207,6 @@ class OllamaLLM(BaseLLM):
             **kwargs,
         }

-        return params
-
     @property
     def _llm_type(self) -> str:
         """Return type of LLM."""
@@ -267,14 +268,14 @@ class OllamaLLM(BaseLLM):
         prompt: str,
         stop: Optional[list[str]] = None,
         run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
-        verbose: bool = False,
+        verbose: bool = False,  # noqa: FBT001, FBT002
         **kwargs: Any,
     ) -> GenerationChunk:
         final_chunk = None
         async for stream_resp in self._acreate_generate_stream(prompt, stop, **kwargs):
             if not isinstance(stream_resp, str):
                 chunk = GenerationChunk(
-                    text=stream_resp["response"] if "response" in stream_resp else "",
+                    text=stream_resp.get("response", ""),
                     generation_info=(
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),
@@ -290,7 +291,8 @@ class OllamaLLM(BaseLLM):
                 verbose=verbose,
             )
         if final_chunk is None:
-            raise ValueError("No data received from Ollama stream.")
+            msg = "No data received from Ollama stream."
+            raise ValueError(msg)

         return final_chunk

@@ -299,14 +301,14 @@ class OllamaLLM(BaseLLM):
         prompt: str,
         stop: Optional[list[str]] = None,
         run_manager: Optional[CallbackManagerForLLMRun] = None,
-        verbose: bool = False,
+        verbose: bool = False,  # noqa: FBT001, FBT002
         **kwargs: Any,
     ) -> GenerationChunk:
         final_chunk = None
         for stream_resp in self._create_generate_stream(prompt, stop, **kwargs):
             if not isinstance(stream_resp, str):
                 chunk = GenerationChunk(
-                    text=stream_resp["response"] if "response" in stream_resp else "",
+                    text=stream_resp.get("response", ""),
                     generation_info=(
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),
@@ -322,7 +324,8 @@ class OllamaLLM(BaseLLM):
                 verbose=verbose,
             )
         if final_chunk is None:
-            raise ValueError("No data received from Ollama stream.")
+            msg = "No data received from Ollama stream."
+            raise ValueError(msg)

         return final_chunk

@@ -48,15 +48,62 @@ target-version = "py39"

 [tool.ruff.lint]
 select = [
-    "E",  # pycodestyle
-    "F",  # pyflakes
-    "I",  # isort
-    "T201",  # print
-    "D",  # pydocstyle
-    "UP",  # pyupgrade
-    "S",  # flake8-bandit
+    "A",  # flake8-builtins
+    "B",  # flake8-bugbear
+    "ASYNC",  # flake8-async
+    "C4",  # flake8-comprehensions
+    "COM",  # flake8-commas
+    "D",  # pydocstyle
+    "DOC",  # pydoclint
+    "E",  # pycodestyle error
+    "EM",  # flake8-errmsg
+    "F",  # pyflakes
+    "FA",  # flake8-future-annotations
+    "FBT",  # flake8-boolean-trap
+    "FLY",  # flake8-flynt
+    "I",  # isort
+    "ICN",  # flake8-import-conventions
+    "INT",  # flake8-gettext
+    "ISC",  # isort-comprehensions
+    "PGH",  # pygrep-hooks
+    "PIE",  # flake8-pie
+    "PERF",  # flake8-perf
+    "PYI",  # flake8-pyi
+    "Q",  # flake8-quotes
+    "RET",  # flake8-return
+    "RSE",  # flake8-rst-docstrings
+    "RUF",  # ruff
+    "S",  # flake8-bandit
+    "SLF",  # flake8-self
+    "SLOT",  # flake8-slots
+    "SIM",  # flake8-simplify
+    "T10",  # flake8-debugger
+    "T20",  # flake8-print
+    "TID",  # flake8-tidy-imports
+    "UP",  # pyupgrade
+    "W",  # pycodestyle warning
+    "YTT",  # flake8-2020
 ]
-ignore = [ "UP007", ]
+ignore = [
+    "D100",  # pydocstyle: Missing docstring in public module
+    "D101",  # pydocstyle: Missing docstring in public class
+    "D102",  # pydocstyle: Missing docstring in public method
+    "D103",  # pydocstyle: Missing docstring in public function
+    "D104",  # pydocstyle: Missing docstring in public package
+    "D105",  # pydocstyle: Missing docstring in magic method
+    "D107",  # pydocstyle: Missing docstring in __init__
+    "D203",  # Messes with the formatter
+    "D407",  # pydocstyle: Missing-dashed-underline-after-section
+    "COM812",  # Messes with the formatter
+    "ISC001",  # Messes with the formatter
+    "PERF203",  # Rarely useful
+    "S112",  # Rarely useful
+    "RUF012",  # Doesn't play well with Pydantic
+    "SLF001",  # Private member access
+    "UP007",  # pyupgrade: non-pep604-annotation-union
+    "UP045",  # pyupgrade: non-pep604-annotation-optional
+]
+unfixable = ["B028"]  # People should intentionally tune the stacklevel

 [tool.ruff.lint.pydocstyle]
 convention = "google"
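Note the interplay between this select list and the code changes: enabling FBT (flake8-boolean-trap) is what forced the `# noqa: FBT001, FBT002` comments on the `verbose: bool = False` parameters above, since making those flags keyword-only would change the public signatures. A sketch of both sides of that trade-off, with hypothetical function names:

    # Kept positional for compatibility; the finding is suppressed instead.
    def stream(prompt: str, verbose: bool = False) -> str:  # noqa: FBT001, FBT002
        return prompt if verbose else ""

    # What FBT steers new code toward: a keyword-only boolean flag.
    def stream_kw(prompt: str, *, verbose: bool = False) -> str:
        return prompt if verbose else ""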
@@ -1,5 +1,7 @@
 """Ollama specific chat model integration tests"""

+from __future__ import annotations
+
 from typing import Annotated, Optional

 import pytest
@@ -21,8 +21,7 @@ def get_current_weather(location: str) -> dict:
     """Gets the current weather in a given location."""
     if "boston" in location.lower():
         return {"temperature": "15°F", "conditions": "snow"}
-    else:
-        return {"temperature": "unknown", "conditions": "unknown"}
+    return {"temperature": "unknown", "conditions": "unknown"}


 class TestChatOllama(ChatModelIntegrationTests):
@@ -65,16 +64,15 @@ class TestChatOllama(ChatModelIntegrationTests):

             if chunk.tool_call_chunks:
                 tool_chunk_found = True
-                for tc_chunk in chunk.tool_call_chunks:
-                    collected_tool_chunks.append(tc_chunk)
+                collected_tool_chunks.extend(chunk.tool_call_chunks)

             if chunk.tool_calls:
                 final_tool_calls.extend(chunk.tool_calls)

         assert tool_chunk_found, "Tool streaming did not produce any tool_call_chunks."
-        assert (
-            len(final_tool_calls) == 1
-        ), f"Expected 1 final tool call, but got {len(final_tool_calls)}"
+        assert len(final_tool_calls) == 1, (
+            f"Expected 1 final tool call, but got {len(final_tool_calls)}"
+        )

         final_tool_call = final_tool_calls[0]
         assert final_tool_call["name"] == "get_current_weather"
@@ -110,16 +108,15 @@ class TestChatOllama(ChatModelIntegrationTests):

             if chunk.tool_call_chunks:
                 tool_chunk_found = True
-                for tc_chunk in chunk.tool_call_chunks:
-                    collected_tool_chunks.append(tc_chunk)
+                collected_tool_chunks.extend(chunk.tool_call_chunks)

             if chunk.tool_calls:
                 final_tool_calls.extend(chunk.tool_calls)

         assert tool_chunk_found, "Tool streaming did not produce any tool_call_chunks."
-        assert (
-            len(final_tool_calls) == 1
-        ), f"Expected 1 final tool call, but got {len(final_tool_calls)}"
+        assert len(final_tool_calls) == 1, (
+            f"Expected 1 final tool call, but got {len(final_tool_calls)}"
+        )

         final_tool_call = final_tool_calls[0]
         assert final_tool_call["name"] == "get_current_weather"
@@ -4,4 +4,3 @@ import pytest
 @pytest.mark.compile
 def test_placeholder() -> None:
     """Used for compiling integration tests without running any real tests."""
-    pass
@@ -305,7 +305,7 @@ wheels = [

 [[package]]
 name = "langchain-core"
-version = "0.3.66"
+version = "0.3.68"
 source = { editable = "../../core" }
 dependencies = [
     { name = "jsonpatch" },
@@ -334,7 +334,7 @@ dev = [
     { name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
     { name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
 ]
-lint = [{ name = "ruff", specifier = ">=0.11.2,<0.12.0" }]
+lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
 test = [
     { name = "blockbuster", specifier = "~=1.5.18" },
     { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
@@ -403,7 +403,7 @@ requires-dist = [
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.6,<3.0.0" }]
 dev = [{ name = "langchain-core", editable = "../../core" }]
-lint = [{ name = "ruff", specifier = ">=0.1.8,<1.0.0" }]
+lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
 test = [
     { name = "langchain-core", editable = "../../core" },
     { name = "langchain-tests", editable = "../../standard-tests" },
@@ -456,7 +456,7 @@ requires-dist = [

 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
-lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
+lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
 test = [{ name = "langchain-core", editable = "../../core" }]
 test-integration = []
 typing = [
@@ -1342,26 +1342,27 @@ wheels = [

 [[package]]
 name = "ruff"
-version = "0.1.15"
+version = "0.12.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/42/33/7165f88a156be1c2fd13a18b3af6e75bbf82da5b6978cd2128d666accc18/ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e", size = 1971643, upload-time = "2024-01-29T23:06:05.541Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/6c/3d/d9a195676f25d00dbfcf3cf95fdd4c685c497fcfa7e862a44ac5e4e96480/ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e", size = 4432239, upload-time = "2025-07-03T16:40:19.566Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/11/2c/fac0658910ea3ea87a23583e58277533154261b73f9460388eb2e6e02e8f/ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df", size = 14357437, upload-time = "2024-01-29T23:05:04.991Z" },
-    { url = "https://files.pythonhosted.org/packages/5b/c1/2116927385c761ffb786dfb77654a634ecd7803dee4de3b47b59536374f1/ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f", size = 7329669, upload-time = "2024-01-29T23:05:12.437Z" },
-    { url = "https://files.pythonhosted.org/packages/18/d7/2199ecb42cef4d70de0e72ce4ca8878d060e25fe4434cb66f51e26158a2a/ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8", size = 7137343, upload-time = "2024-01-29T23:05:16.159Z" },
-    { url = "https://files.pythonhosted.org/packages/bb/e0/8a6f9db2c5b8c7108c7e7347cd6beca805d1b2ae618569c72f2515d11e52/ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807", size = 6563223, upload-time = "2024-01-29T23:05:19.687Z" },
-    { url = "https://files.pythonhosted.org/packages/98/fa/2a627747a5a5f7e1d3447704f795fd35d486460838485762cd569ef8eb0e/ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec", size = 7534853, upload-time = "2024-01-29T23:05:23.18Z" },
-    { url = "https://files.pythonhosted.org/packages/55/09/c09d0f9b41d1f5e3de117579f2fcdb7063fd76cd92d6614eae1b77ccbccb/ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5", size = 8168826, upload-time = "2024-01-29T23:05:26.544Z" },
-    { url = "https://files.pythonhosted.org/packages/72/48/c9dfc2c87dc6b92446d8092c2be25b42ca4fb201cecb2499996ccf483c34/ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e", size = 7942963, upload-time = "2024-01-29T23:05:30.655Z" },
-    { url = "https://files.pythonhosted.org/packages/0c/57/dbc885f94450335fcff82301c4b25cf614894e79d9afbd249714e709ab42/ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b", size = 8524998, upload-time = "2024-01-29T23:05:34.503Z" },
-    { url = "https://files.pythonhosted.org/packages/39/75/8dea2fc156ae525971fdada8723f78e605dcf89428f5686728438b12f9ef/ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1", size = 7534144, upload-time = "2024-01-29T23:05:38.642Z" },
-    { url = "https://files.pythonhosted.org/packages/47/41/96b770475c46590bfd051ca0c5f797b2d45f2638c45f3a9daf1ae55b96d6/ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5", size = 7055002, upload-time = "2024-01-29T23:05:41.955Z" },
-    { url = "https://files.pythonhosted.org/packages/e8/ca/4066dbcc3631a4efe1fe695f42f20aca50474d760b3bd8e57d7565d75aa5/ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2", size = 6552130, upload-time = "2024-01-29T23:05:45.487Z" },
-    { url = "https://files.pythonhosted.org/packages/b8/85/da93f0fc8f2424cf776fcce6daef9291162345179d16faf1401ff2890068/ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852", size = 7214386, upload-time = "2024-01-29T23:05:48.346Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/bf/de34ad339e0d1f6faa858cbcf793f3abc168b7aa516dd9227d843b992be8/ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447", size = 7602787, upload-time = "2024-01-29T23:05:51.341Z" },
-    { url = "https://files.pythonhosted.org/packages/8d/61/ffdccecb0b39521d7060d6a6bc33c53d7f20d48d3511d6333cb01f26e979/ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f", size = 6670488, upload-time = "2024-01-29T23:05:54.454Z" },
-    { url = "https://files.pythonhosted.org/packages/2b/5f/3ba51cc770ed2b2df88efc32bba26759e6ac5c6149319a60913a85230936/ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587", size = 7319395, upload-time = "2024-01-29T23:05:58.135Z" },
-    { url = "https://files.pythonhosted.org/packages/c9/bd/c196493563d6bf8fe960f10b83926a3fae3a43a96eac6b263aecb96c61d7/ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360", size = 6998592, upload-time = "2024-01-29T23:06:01.904Z" },
+    { url = "https://files.pythonhosted.org/packages/74/b6/2098d0126d2d3318fd5bec3ad40d06c25d377d95749f7a0c5af17129b3b1/ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be", size = 10369761, upload-time = "2025-07-03T16:39:38.847Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/4b/5da0142033dbe155dc598cfb99262d8ee2449d76920ea92c4eeb9547c208/ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e", size = 11155659, upload-time = "2025-07-03T16:39:42.294Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/21/967b82550a503d7c5c5c127d11c935344b35e8c521f52915fc858fb3e473/ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc", size = 10537769, upload-time = "2025-07-03T16:39:44.75Z" },
+    { url = "https://files.pythonhosted.org/packages/33/91/00cff7102e2ec71a4890fb7ba1803f2cdb122d82787c7d7cf8041fe8cbc1/ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922", size = 10717602, upload-time = "2025-07-03T16:39:47.652Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/eb/928814daec4e1ba9115858adcda44a637fb9010618721937491e4e2283b8/ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b", size = 10198772, upload-time = "2025-07-03T16:39:49.641Z" },
+    { url = "https://files.pythonhosted.org/packages/50/fa/f15089bc20c40f4f72334f9145dde55ab2b680e51afb3b55422effbf2fb6/ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d", size = 11845173, upload-time = "2025-07-03T16:39:52.069Z" },
+    { url = "https://files.pythonhosted.org/packages/43/9f/1f6f98f39f2b9302acc161a4a2187b1e3a97634fe918a8e731e591841cf4/ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1", size = 12553002, upload-time = "2025-07-03T16:39:54.551Z" },
+    { url = "https://files.pythonhosted.org/packages/d8/70/08991ac46e38ddd231c8f4fd05ef189b1b94be8883e8c0c146a025c20a19/ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4", size = 12171330, upload-time = "2025-07-03T16:39:57.55Z" },
+    { url = "https://files.pythonhosted.org/packages/88/a9/5a55266fec474acfd0a1c73285f19dd22461d95a538f29bba02edd07a5d9/ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9", size = 11774717, upload-time = "2025-07-03T16:39:59.78Z" },
+    { url = "https://files.pythonhosted.org/packages/87/e5/0c270e458fc73c46c0d0f7cf970bb14786e5fdb88c87b5e423a4bd65232b/ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da", size = 11646659, upload-time = "2025-07-03T16:40:01.934Z" },
+    { url = "https://files.pythonhosted.org/packages/b7/b6/45ab96070c9752af37f0be364d849ed70e9ccede07675b0ec4e3ef76b63b/ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce", size = 10604012, upload-time = "2025-07-03T16:40:04.363Z" },
+    { url = "https://files.pythonhosted.org/packages/86/91/26a6e6a424eb147cc7627eebae095cfa0b4b337a7c1c413c447c9ebb72fd/ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d", size = 10176799, upload-time = "2025-07-03T16:40:06.514Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/0c/9f344583465a61c8918a7cda604226e77b2c548daf8ef7c2bfccf2b37200/ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04", size = 11241507, upload-time = "2025-07-03T16:40:08.708Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/b7/99c34ded8fb5f86c0280278fa89a0066c3760edc326e935ce0b1550d315d/ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342", size = 11717609, upload-time = "2025-07-03T16:40:10.836Z" },
+    { url = "https://files.pythonhosted.org/packages/51/de/8589fa724590faa057e5a6d171e7f2f6cffe3287406ef40e49c682c07d89/ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a", size = 10523823, upload-time = "2025-07-03T16:40:13.203Z" },
+    { url = "https://files.pythonhosted.org/packages/94/47/8abf129102ae4c90cba0c2199a1a9b0fa896f6f806238d6f8c14448cc748/ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639", size = 11629831, upload-time = "2025-07-03T16:40:15.478Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/1f/72d2946e3cc7456bb837e88000eb3437e55f80db339c840c04015a11115d/ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12", size = 10735334, upload-time = "2025-07-03T16:40:17.677Z" },
 ]

 [[package]]