Mirror of https://github.com/hwchase17/langchain.git, synced 2025-08-23 03:22:38 +00:00

Merge branch 'master' into pprados/pdf-router

This change is contained in commit 5c2f21b254.

.github/workflows/_release.yml (vendored, 7 lines changed)
@@ -395,8 +395,11 @@ jobs:

          # Checkout the latest package files
          rm -rf $GITHUB_WORKSPACE/libs/partners/${{ matrix.partner }}/*
          cd $GITHUB_WORKSPACE/libs/partners/${{ matrix.partner }}
          git checkout "$LATEST_PACKAGE_TAG" -- .
          rm -rf $GITHUB_WORKSPACE/libs/standard-tests/*
          cd $GITHUB_WORKSPACE/libs/
          git checkout "$LATEST_PACKAGE_TAG" -- standard-tests/
          git checkout "$LATEST_PACKAGE_TAG" -- partners/${{ matrix.partner }}/
          cd partners/${{ matrix.partner }}

          # Print as a sanity check
          echo "Version number from pyproject.toml: "
@@ -37,14 +37,18 @@ T = TypeVar("T")


def _hash_string_to_uuid(input_string: str) -> uuid.UUID:
    """Hashes a string and returns the corresponding UUID."""
    hash_value = hashlib.sha1(input_string.encode("utf-8")).hexdigest()  # noqa: S324
    hash_value = hashlib.sha1(
        input_string.encode("utf-8"), usedforsecurity=False
    ).hexdigest()
    return uuid.uuid5(NAMESPACE_UUID, hash_value)


def _hash_nested_dict_to_uuid(data: dict[Any, Any]) -> uuid.UUID:
    """Hashes a nested dictionary and returns the corresponding UUID."""
    serialized_data = json.dumps(data, sort_keys=True)
    hash_value = hashlib.sha1(serialized_data.encode("utf-8")).hexdigest()  # noqa: S324
    hash_value = hashlib.sha1(
        serialized_data.encode("utf-8"), usedforsecurity=False
    ).hexdigest()
    return uuid.uuid5(NAMESPACE_UUID, hash_value)
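For orientation: the hunk above swaps the `# noqa: S324` suppression for `usedforsecurity=False`, which tells hashlib (and FIPS-enabled builds) that SHA-1 is used only as a fingerprint; the derived UUIDs stay identical. A minimal sketch of the same pattern, using a placeholder namespace constant rather than the module's real one:

import hashlib
import uuid

NAMESPACE_UUID = uuid.UUID(int=1985)  # placeholder; the real module defines its own constant


def stable_id(text: str) -> uuid.UUID:
    # SHA-1 is used purely for deduplication, so it is flagged as a non-security use.
    digest = hashlib.sha1(text.encode("utf-8"), usedforsecurity=False).hexdigest()
    return uuid.uuid5(NAMESPACE_UUID, digest)


assert stable_id("hello") == stable_id("hello")  # deterministic across runs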
@@ -5,15 +5,20 @@ from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langchain_core.load.dump import dumpd, dumps
    from langchain_core.load.load import load, loads
    from langchain_core.load.load import loads
    from langchain_core.load.serializable import Serializable

# Unfortunately, we have to eagerly import load from langchain_core/load/load.py
# eagerly to avoid a namespace conflict. We want users to still be able to use
# `from langchain_core.load import load` to get the load function, but
# the `from langchain_core.load.load import load` absolute import should also work.
from langchain_core.load.load import load

__all__ = ["dumpd", "dumps", "load", "loads", "Serializable"]

_dynamic_imports = {
    "dumpd": "dump",
    "dumps": "dump",
    "load": "load",
    "loads": "load",
    "Serializable": "serializable",
}
@@ -99,4 +99,4 @@ def __getattr__(attr_name: str) -> object:


def __dir__() -> list[str]:
    return list(__all__)
    return __all__
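The `_dynamic_imports` table pairs with a module-level `__getattr__` (only `__dir__` is touched in the hunk above), so submodules are imported on first attribute access. A rough, hypothetical sketch of that pattern — not the exact langchain_core implementation:

from importlib import import_module

_dynamic_imports = {"dumpd": "dump", "dumps": "dump", "loads": "load"}  # attr -> submodule
__all__ = [*_dynamic_imports, "load"]


def __getattr__(attr_name: str) -> object:
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        raise AttributeError(f"module {__name__!r} has no attribute {attr_name!r}")
    module = import_module(f".{module_name}", package=__package__)
    result = getattr(module, attr_name)
    globals()[attr_name] = result  # cache so later lookups bypass __getattr__
    return result


def __dir__() -> list[str]:
    return __all__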
@@ -25,7 +25,6 @@ from pydantic import (
from typing_extensions import Self, override

from langchain_core._api import deprecated
from langchain_core.load import Serializable
from langchain_core.messages import (
    AIMessage,
    AnyMessage,
@@ -39,6 +38,10 @@ from langchain_core.messages.base import get_msg_title_repr
from langchain_core.prompt_values import ChatPromptValue, ImageURL, PromptValue
from langchain_core.prompts.base import BasePromptTemplate
from langchain_core.prompts.image import ImagePromptTemplate
from langchain_core.prompts.message import (
    BaseMessagePromptTemplate,
    _DictMessagePromptTemplate,
)
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.prompts.string import (
    PromptTemplateFormat,
@@ -52,87 +55,6 @@ if TYPE_CHECKING:
    from collections.abc import Sequence


class BaseMessagePromptTemplate(Serializable, ABC):
    """Base class for message prompt templates."""

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether or not the class is serializable.

        Returns: True.
        """
        return True

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.

        Default namespace is ["langchain", "prompts", "chat"].
        """
        return ["langchain", "prompts", "chat"]

    @abstractmethod
    def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
        """Format messages from kwargs. Should return a list of BaseMessages.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """

    async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
        """Async format messages from kwargs.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """
        return self.format_messages(**kwargs)

    @property
    @abstractmethod
    def input_variables(self) -> list[str]:
        """Input variables for this prompt template.

        Returns:
            List of input variables.
        """

    def pretty_repr(
        self,
        html: bool = False,  # noqa: FBT001,FBT002
    ) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        raise NotImplementedError

    def pretty_print(self) -> None:
        """Print a human-readable representation."""
        print(self.pretty_repr(html=is_interactive_env()))  # noqa: T201

    def __add__(self, other: Any) -> ChatPromptTemplate:
        """Combine two prompt templates.

        Args:
            other: Another prompt template.

        Returns:
            Combined prompt template.
        """
        prompt = ChatPromptTemplate(messages=[self])
        return prompt + other


class MessagesPlaceholder(BaseMessagePromptTemplate):
    """Prompt template that assumes variable is already list of messages.
@@ -473,7 +395,10 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
    """Human message prompt template. This is a message sent from the user."""

    prompt: Union[
        StringPromptTemplate, list[Union[StringPromptTemplate, ImagePromptTemplate]]
        StringPromptTemplate,
        list[
            Union[StringPromptTemplate, ImagePromptTemplate, _DictMessagePromptTemplate]
        ],
    ]
    """Prompt template."""
    additional_kwargs: dict = Field(default_factory=dict)
@@ -484,7 +409,10 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
    @classmethod
    def from_template(
        cls: type[Self],
        template: Union[str, list[Union[str, _TextTemplateParam, _ImageTemplateParam]]],
        template: Union[
            str,
            list[Union[str, _TextTemplateParam, _ImageTemplateParam, dict[str, Any]]],
        ],
        template_format: PromptTemplateFormat = "f-string",
        *,
        partial_variables: Optional[dict[str, Any]] = None,
@@ -567,6 +495,19 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
                    msg = f"Invalid image template: {tmpl}"
                    raise ValueError(msg)
                prompt.append(img_template_obj)
            elif isinstance(tmpl, dict):
                if template_format == "jinja2":
                    msg = (
                        "jinja2 is unsafe and is not supported for templates "
                        "expressed as dicts. Please use 'f-string' or 'mustache' "
                        "format."
                    )
                    raise ValueError(msg)
                data_template_obj = _DictMessagePromptTemplate(
                    template=cast("dict[str, Any]", tmpl),
                    template_format=template_format,
                )
                prompt.append(data_template_obj)
            else:
                msg = f"Invalid template: {tmpl}"
                raise ValueError(msg)
@@ -644,11 +585,16 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
        for prompt in self.prompt:
            inputs = {var: kwargs[var] for var in prompt.input_variables}
            if isinstance(prompt, StringPromptTemplate):
                formatted: Union[str, ImageURL] = prompt.format(**inputs)
                formatted: Union[str, ImageURL, dict[str, Any]] = prompt.format(
                    **inputs
                )
                content.append({"type": "text", "text": formatted})
            elif isinstance(prompt, ImagePromptTemplate):
                formatted = prompt.format(**inputs)
                content.append({"type": "image_url", "image_url": formatted})
            elif isinstance(prompt, _DictMessagePromptTemplate):
                formatted = prompt.format(**inputs)
                content.append(formatted)
        return self._msg_class(
            content=content, additional_kwargs=self.additional_kwargs
        )
@@ -671,11 +617,16 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
        for prompt in self.prompt:
            inputs = {var: kwargs[var] for var in prompt.input_variables}
            if isinstance(prompt, StringPromptTemplate):
                formatted: Union[str, ImageURL] = await prompt.aformat(**inputs)
                formatted: Union[str, ImageURL, dict[str, Any]] = await prompt.aformat(
                    **inputs
                )
                content.append({"type": "text", "text": formatted})
            elif isinstance(prompt, ImagePromptTemplate):
                formatted = await prompt.aformat(**inputs)
                content.append({"type": "image_url", "image_url": formatted})
            elif isinstance(prompt, _DictMessagePromptTemplate):
                formatted = prompt.format(**inputs)
                content.append(formatted)
        return self._msg_class(
            content=content, additional_kwargs=self.additional_kwargs
        )
@@ -811,7 +762,7 @@ MessageLikeRepresentation = Union[
        Union[str, list[dict], list[object]],
    ],
    str,
    dict,
    dict[str, Any],
]
@@ -984,7 +935,8 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

        """
        _messages = [
            _convert_to_message(message, template_format) for message in messages
            _convert_to_message_template(message, template_format)
            for message in messages
        ]

        # Automatically infer input variables from messages
@@ -1071,7 +1023,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
            ValueError: If input variables do not match.
        """
        messages = values["messages"]
        input_vars = set()
        input_vars: set = set()
        optional_variables = set()
        input_types: dict[str, Any] = values.get("input_types", {})
        for message in messages:
@@ -1126,7 +1078,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
        return cls.from_messages([message])

    @classmethod
    @deprecated("0.0.1", alternative="from_messages classmethod", pending=True)
    @deprecated("0.0.1", alternative="from_messages", pending=True)
    def from_role_strings(
        cls, string_messages: list[tuple[str, str]]
    ) -> ChatPromptTemplate:
@@ -1146,7 +1098,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
        )

    @classmethod
    @deprecated("0.0.1", alternative="from_messages classmethod", pending=True)
    @deprecated("0.0.1", alternative="from_messages", pending=True)
    def from_strings(
        cls, string_messages: list[tuple[type[BaseMessagePromptTemplate], str]]
    ) -> ChatPromptTemplate:
@@ -1297,7 +1249,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
        Args:
            message: representation of a message to append.
        """
        self.messages.append(_convert_to_message(message))
        self.messages.append(_convert_to_message_template(message))

    def extend(self, messages: Sequence[MessageLikeRepresentation]) -> None:
        """Extend the chat template with a sequence of messages.
@@ -1305,7 +1257,9 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
        Args:
            messages: sequence of message representations to append.
        """
        self.messages.extend([_convert_to_message(message) for message in messages])
        self.messages.extend(
            [_convert_to_message_template(message) for message in messages]
        )

    @overload
    def __getitem__(self, index: int) -> MessageLike: ...
@@ -1425,7 +1379,7 @@ def _create_template_from_message_type(
    return message


def _convert_to_message(
def _convert_to_message_template(
    message: MessageLikeRepresentation,
    template_format: PromptTemplateFormat = "f-string",
) -> Union[BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate]:
@@ -1488,3 +1442,7 @@ def _convert_to_message(
        raise NotImplementedError(msg)

    return _message


# For backwards compat:
_convert_to_message = _convert_to_message_template
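The rename of `_convert_to_message` to `_convert_to_message_template` is kept source-compatible by the alias at the end of the hunk above. A small illustration (the helper is private API, so this only matters to code that already reached into the module):

from langchain_core.prompts import chat

# Both names resolve to the same function after this change.
assert chat._convert_to_message is chat._convert_to_message_template
template = chat._convert_to_message_template(("human", "hi {name}"))  # a HumanMessagePromptTemplate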
@@ -14,10 +14,8 @@ from typing_extensions import override

from langchain_core.example_selectors import BaseExampleSelector
from langchain_core.messages import BaseMessage, get_buffer_string
from langchain_core.prompts.chat import (
    BaseChatPromptTemplate,
    BaseMessagePromptTemplate,
)
from langchain_core.prompts.chat import BaseChatPromptTemplate
from langchain_core.prompts.message import BaseMessagePromptTemplate
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.prompts.string import (
    DEFAULT_FORMATTER_MAPPING,
libs/core/langchain_core/prompts/message.py (new file, 186 lines)
@@ -0,0 +1,186 @@
"""Message prompt templates."""

from __future__ import annotations

from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, Literal

from langchain_core.load import Serializable
from langchain_core.messages import BaseMessage, convert_to_messages
from langchain_core.prompts.string import (
    DEFAULT_FORMATTER_MAPPING,
    get_template_variables,
)
from langchain_core.utils.interactive_env import is_interactive_env

if TYPE_CHECKING:
    from langchain_core.prompts.chat import ChatPromptTemplate


class BaseMessagePromptTemplate(Serializable, ABC):
    """Base class for message prompt templates."""

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether or not the class is serializable.

        Returns: True.
        """
        return True

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.

        Default namespace is ["langchain", "prompts", "chat"].
        """
        return ["langchain", "prompts", "chat"]

    @abstractmethod
    def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
        """Format messages from kwargs. Should return a list of BaseMessages.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """

    async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
        """Async format messages from kwargs.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """
        return self.format_messages(**kwargs)

    @property
    @abstractmethod
    def input_variables(self) -> list[str]:
        """Input variables for this prompt template.

        Returns:
            List of input variables.
        """

    def pretty_repr(
        self,
        html: bool = False,  # noqa: FBT001,FBT002
    ) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        raise NotImplementedError

    def pretty_print(self) -> None:
        """Print a human-readable representation."""
        print(self.pretty_repr(html=is_interactive_env()))  # noqa: T201

    def __add__(self, other: Any) -> ChatPromptTemplate:
        """Combine two prompt templates.

        Args:
            other: Another prompt template.

        Returns:
            Combined prompt template.
        """
        from langchain_core.prompts.chat import ChatPromptTemplate

        prompt = ChatPromptTemplate(messages=[self])
        return prompt + other


class _DictMessagePromptTemplate(BaseMessagePromptTemplate):
    """Template represented by a dict that recursively fills input vars in string vals.

    Special handling of image_url dicts to load local paths. These look like:
    ``{"type": "image_url", "image_url": {"path": "..."}}``
    """

    template: dict[str, Any]
    template_format: Literal["f-string", "mustache"]

    def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
        msg_dict = _insert_input_variables(self.template, kwargs, self.template_format)
        return convert_to_messages([msg_dict])

    @property
    def input_variables(self) -> list[str]:
        return _get_input_variables(self.template, self.template_format)

    @property
    def _prompt_type(self) -> str:
        return "message-dict-prompt"

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        return ["langchain_core", "prompts", "message"]

    def format(
        self,
        **kwargs: Any,
    ) -> dict[str, Any]:
        """Format the prompt with the inputs."""
        return _insert_input_variables(self.template, kwargs, self.template_format)


def _get_input_variables(
    template: dict, template_format: Literal["f-string", "mustache"]
) -> list[str]:
    input_variables = []
    for v in template.values():
        if isinstance(v, str):
            input_variables += get_template_variables(v, template_format)
        elif isinstance(v, dict):
            input_variables += _get_input_variables(v, template_format)
        elif isinstance(v, (list, tuple)):
            for x in v:
                if isinstance(x, str):
                    input_variables += get_template_variables(x, template_format)
                elif isinstance(x, dict):
                    input_variables += _get_input_variables(x, template_format)
    return list(set(input_variables))


def _insert_input_variables(
    template: dict[str, Any],
    inputs: dict[str, Any],
    template_format: Literal["f-string", "mustache"],
) -> dict[str, Any]:
    formatted = {}
    formatter = DEFAULT_FORMATTER_MAPPING[template_format]
    for k, v in template.items():
        if isinstance(v, str):
            formatted[k] = formatter(v, **inputs)
        elif isinstance(v, dict):
            # No longer support loading local images.
            if k == "image_url" and "path" in v:
                msg = (
                    "Specifying image inputs via file path in environments with "
                    "user-input paths is a security vulnerability. Out of an abundance "
                    "of caution, the utility has been removed to prevent possible "
                    "misuse."
                )
                raise ValueError(msg)
            formatted[k] = _insert_input_variables(v, inputs, template_format)
        elif isinstance(v, (list, tuple)):
            formatted_v = []
            for x in v:
                if isinstance(x, str):
                    formatted_v.append(formatter(x, **inputs))
                elif isinstance(x, dict):
                    formatted_v.append(
                        _insert_input_variables(x, inputs, template_format)
                    )
            formatted[k] = type(v)(formatted_v)
    return formatted
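In short, `_DictMessagePromptTemplate.format_messages` walks the dict, formats every string value with the chosen template format, and hands the result to `convert_to_messages`. A minimal, illustrative sketch of that flow (the class is private API; the new unit test further below exercises it more fully):

from langchain_core.prompts.message import _DictMessagePromptTemplate

tmpl = _DictMessagePromptTemplate(
    template={"role": "user", "content": "Summarize: {text}"},
    template_format="f-string",
)
print(tmpl.input_variables)             # ["text"]
print(tmpl.format_messages(text="hi"))  # [HumanMessage(content="Summarize: hi")]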
@@ -1,3 +1,3 @@
"""langchain-core version information and utilities."""

VERSION = "0.3.51"
VERSION = "0.3.52"
@@ -17,7 +17,7 @@ dependencies = [
    "pydantic<3.0.0,>=2.7.4; python_full_version >= \"3.12.4\"",
]
name = "langchain-core"
version = "0.3.51"
version = "0.3.52"
description = "Building applications with LLMs through composability"
readme = "README.md"
@@ -1,7 +1,6 @@
import base64
import json
import re
import typing
from collections.abc import Sequence
from typing import Any, Callable, Optional, Union

@@ -666,9 +665,7 @@ class FakeTokenCountingModel(FakeChatModel):
        self,
        messages: list[BaseMessage],
        tools: Optional[
            Sequence[
                Union[typing.Dict[str, Any], type, Callable, BaseTool]  # noqa: UP006
            ]
            Sequence[Union[dict[str, Any], type, Callable, BaseTool]]
        ] = None,
    ) -> int:
        return dummy_token_counter(messages)
@@ -146,7 +146,6 @@ def test_pydantic_output_parser() -> None:
    )

    result = pydantic_parser.parse(DEF_RESULT)
    print("parse_result:", result)  # noqa: T201
    assert result == DEF_EXPECTED_RESULT
    assert pydantic_parser.OutputType is TestModel
@@ -18,26 +18,29 @@ from langchain_core.messages import (
    ChatMessage,
    HumanMessage,
    SystemMessage,
    ToolMessage,
    get_buffer_string,
)
from langchain_core.prompt_values import ChatPromptValue
from langchain_core.prompts import PromptTemplate
from langchain_core.prompts.chat import (
    AIMessagePromptTemplate,
    BaseMessagePromptTemplate,
    ChatMessagePromptTemplate,
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    SystemMessagePromptTemplate,
    _convert_to_message,
    _convert_to_message_template,
)
from langchain_core.prompts.message import BaseMessagePromptTemplate
from langchain_core.prompts.string import PromptTemplateFormat
from langchain_core.utils.pydantic import (
    PYDANTIC_VERSION,
)
from tests.unit_tests.pydantic_utils import _normalize_schema

CUR_DIR = Path(__file__).parent.absolute().resolve()


@pytest.fixture
def messages() -> list[BaseMessagePromptTemplate]:
@@ -521,7 +524,7 @@ def test_convert_to_message(
    args: Any, expected: Union[BaseMessage, BaseMessagePromptTemplate]
) -> None:
    """Test convert to message."""
    assert _convert_to_message(args) == expected
    assert _convert_to_message_template(args) == expected


def test_chat_prompt_template_indexing() -> None:
@@ -566,7 +569,7 @@ def test_convert_to_message_is_strict() -> None:
        # meow does not correspond to a valid message type.
        # this test is here to ensure that functionality to interpret `meow`
        # as a role is NOT added.
        _convert_to_message(("meow", "question"))
        _convert_to_message_template(("meow", "question"))


def test_chat_message_partial() -> None:
@@ -955,7 +958,7 @@ def test_chat_prompt_w_msgs_placeholder_ser_des(snapshot: SnapshotAssertion) ->
    assert load(dumpd(prompt)) == prompt


async def test_chat_tmpl_serdes(snapshot: SnapshotAssertion) -> None:
def test_chat_tmpl_serdes(snapshot: SnapshotAssertion) -> None:
    """Test chat prompt template ser/des."""
    template = ChatPromptTemplate(
        [
@@ -1006,6 +1009,89 @@ async def test_chat_tmpl_serdes(snapshot: SnapshotAssertion) -> None:
    assert load(dumpd(template)) == template


@pytest.mark.xfail(
    reason=(
        "In a breaking release, we can update `_convert_to_message_template` to use "
        "_DictMessagePromptTemplate for all `dict` inputs, allowing for templatization "
        "of message attributes outside content blocks. That would enable the below "
        "test to pass."
    )
)
def test_chat_tmpl_dict_msg() -> None:
    template = ChatPromptTemplate(
        [
            {
                "role": "assistant",
                "content": [
                    {
                        "type": "text",
                        "text": "{text1}",
                        "cache_control": {"type": "ephemeral"},
                    },
                ],
                "name": "{name1}",
                "tool_calls": [
                    {
                        "name": "{tool_name1}",
                        "args": {"arg1": "{tool_arg1}"},
                        "id": "1",
                        "type": "tool_call",
                    }
                ],
            },
            {
                "role": "tool",
                "content": "{tool_content2}",
                "tool_call_id": "1",
                "name": "{tool_name1}",
            },
        ]
    )
    expected = [
        AIMessage(
            [
                {
                    "type": "text",
                    "text": "important message",
                    "cache_control": {"type": "ephemeral"},
                },
            ],
            name="foo",
            tool_calls=[
                {
                    "name": "do_stuff",
                    "args": {"arg1": "important arg1"},
                    "id": "1",
                    "type": "tool_call",
                }
            ],
        ),
        ToolMessage("foo", name="do_stuff", tool_call_id="1"),
    ]

    actual = template.invoke(
        {
            "text1": "important message",
            "name1": "foo",
            "tool_arg1": "important arg1",
            "tool_name1": "do_stuff",
            "tool_content2": "foo",
        }
    ).to_messages()
    assert actual == expected

    partial_ = template.partial(text1="important message")
    actual = partial_.invoke(
        {
            "name1": "foo",
            "tool_arg1": "important arg1",
            "tool_name1": "do_stuff",
            "tool_content2": "foo",
        }
    ).to_messages()
    assert actual == expected


def test_chat_prompt_template_variable_names() -> None:
    """This test was written for an edge case that triggers a warning from Pydantic.
@@ -1049,3 +1135,87 @@ def test_chat_prompt_template_variable_names() -> None:
        "title": "PromptInput",
        "type": "object",
    }


def test_data_prompt_template_deserializable() -> None:
    """Test that the image prompt template is serializable."""
    load(
        dumpd(
            ChatPromptTemplate.from_messages(
                [
                    (
                        "system",
                        [{"type": "image", "source_type": "url", "url": "{url}"}],
                    )
                ]
            )
        )
    )


@pytest.mark.requires("jinja2")
@pytest.mark.parametrize(
    ("template_format", "cache_control_placeholder", "source_data_placeholder"),
    [
        ("f-string", "{cache_type}", "{source_data}"),
        ("mustache", "{{cache_type}}", "{{source_data}}"),
    ],
)
def test_chat_prompt_template_data_prompt_from_message(
    template_format: PromptTemplateFormat,
    cache_control_placeholder: str,
    source_data_placeholder: str,
) -> None:
    prompt: dict = {
        "type": "image",
        "source_type": "base64",
        "data": f"{source_data_placeholder}",
    }

    template = ChatPromptTemplate.from_messages(
        [("human", [prompt])], template_format=template_format
    )
    assert template.format_messages(source_data="base64data") == [
        HumanMessage(
            content=[
                {
                    "type": "image",
                    "source_type": "base64",
                    "data": "base64data",
                }
            ]
        )
    ]

    # metadata
    prompt["metadata"] = {"cache_control": {"type": f"{cache_control_placeholder}"}}
    template = ChatPromptTemplate.from_messages(
        [("human", [prompt])], template_format=template_format
    )
    assert template.format_messages(
        cache_type="ephemeral", source_data="base64data"
    ) == [
        HumanMessage(
            content=[
                {
                    "type": "image",
                    "source_type": "base64",
                    "data": "base64data",
                    "metadata": {"cache_control": {"type": "ephemeral"}},
                }
            ]
        )
    ]


def test_dict_message_prompt_template_errors_on_jinja2() -> None:
    prompt = {
        "type": "image",
        "source_type": "base64",
        "data": "{source_data}",
    }

    with pytest.raises(ValueError, match="jinja2"):
        _ = ChatPromptTemplate.from_messages(
            [("human", [prompt])], template_format="jinja2"
        )
libs/core/tests/unit_tests/prompts/test_message.py (new file, 61 lines)
@@ -0,0 +1,61 @@
from pathlib import Path

from langchain_core.messages import AIMessage, BaseMessage, ToolMessage
from langchain_core.prompts.message import _DictMessagePromptTemplate

CUR_DIR = Path(__file__).parent.absolute().resolve()


def test__dict_message_prompt_template_fstring() -> None:
    template = {
        "role": "assistant",
        "content": [
            {"type": "text", "text": "{text1}", "cache_control": {"type": "ephemeral"}},
        ],
        "name": "{name1}",
        "tool_calls": [
            {
                "name": "{tool_name1}",
                "args": {"arg1": "{tool_arg1}"},
                "id": "1",
                "type": "tool_call",
            }
        ],
    }
    prompt = _DictMessagePromptTemplate(template=template, template_format="f-string")
    expected: BaseMessage = AIMessage(
        [
            {
                "type": "text",
                "text": "important message",
                "cache_control": {"type": "ephemeral"},
            },
        ],
        name="foo",
        tool_calls=[
            {
                "name": "do_stuff",
                "args": {"arg1": "important arg1"},
                "id": "1",
                "type": "tool_call",
            }
        ],
    )
    actual = prompt.format_messages(
        text1="important message",
        name1="foo",
        tool_arg1="important arg1",
        tool_name1="do_stuff",
    )[0]
    assert actual == expected

    template = {
        "role": "tool",
        "content": "{content1}",
        "tool_call_id": "1",
        "name": "{name1}",
    }
    prompt = _DictMessagePromptTemplate(template=template, template_format="f-string")
    expected = ToolMessage("foo", name="bar", tool_call_id="1")
    actual = prompt.format_messages(content1="foo", name1="bar")[0]
    assert actual == expected
@@ -1,5 +1,4 @@
version = 1
revision = 1
requires-python = ">=3.9, <4.0"
resolution-markers = [
    "python_full_version >= '3.13'",
@@ -938,7 +937,7 @@ wheels = [

[[package]]
name = "langchain-core"
version = "0.3.51"
version = "0.3.52"
source = { editable = "." }
dependencies = [
    { name = "jsonpatch" },
@@ -133,6 +133,7 @@ def init_chat_model(
            - 'mistral...' -> 'mistralai'
            - 'deepseek...' -> 'deepseek'
            - 'grok...' -> 'xai'
            - 'sonar...' -> 'perplexity'
        configurable_fields: Which model parameters are
            configurable:

@@ -504,6 +505,8 @@ def _attempt_infer_model_provider(model_name: str) -> Optional[str]:
        return "deepseek"
    elif model_name.startswith("grok"):
        return "xai"
    elif model_name.startswith("sonar"):
        return "perplexity"
    else:
        return None
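With the new `sonar` prefix rule, `init_chat_model` can infer the Perplexity provider from the model name alone. A hedged usage sketch (assumes the langchain-perplexity package is installed and Perplexity credentials are configured; the model name is only an example):

from langchain.chat_models import init_chat_model

# "sonar..." now maps to model_provider="perplexity", the same way "grok..." maps to "xai".
llm = init_chat_model("sonar-pro")
# llm.invoke("Hello")  # requires a Perplexity API key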
@@ -8,11 +8,11 @@ license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
    "anthropic<1,>=0.49.0",
    "langchain-core<1.0.0,>=0.3.45",
    "langchain-core<1.0.0,>=0.3.52",
    "pydantic<3.0.0,>=2.7.4",
]
name = "langchain-anthropic"
version = "0.3.10"
version = "0.3.11"
description = "An integration package connecting AnthropicMessages and LangChain"
readme = "README.md"
@@ -1,7 +1,8 @@
version = 1
requires-python = ">=3.9, <4.0"
resolution-markers = [
    "python_full_version >= '3.12.4'",
    "python_full_version >= '3.13'",
    "python_full_version >= '3.12.4' and python_full_version < '3.13'",
    "python_full_version >= '3.12' and python_full_version < '3.12.4'",
    "python_full_version < '3.12'",
]
@@ -406,7 +407,7 @@ wheels = [

[[package]]
name = "langchain-anthropic"
version = "0.3.10"
version = "0.3.11"
source = { editable = "." }
dependencies = [
    { name = "anthropic" },
@@ -485,7 +486,7 @@ typing = [

[[package]]
name = "langchain-core"
version = "0.3.48"
version = "0.3.52"
source = { editable = "../../core" }
dependencies = [
    { name = "jsonpatch" },
@@ -515,16 +516,18 @@ dev = [
    { name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
    { name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
]
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
lint = [{ name = "ruff", specifier = ">=0.11.2,<0.12.0" }]
test = [
    { name = "blockbuster", specifier = "~=1.5.18" },
    { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
    { name = "grandalf", specifier = ">=0.8,<1.0" },
    { name = "langchain-tests", directory = "../../standard-tests" },
    { name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
    { name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<3" },
    { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" },
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
    { name = "pytest", specifier = ">=8,<9" },
    { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
    { name = "pytest-benchmark" },
    { name = "pytest-codspeed" },
    { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
    { name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
    { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
@@ -535,15 +538,14 @@ test = [
test-integration = []
typing = [
    { name = "langchain-text-splitters", directory = "../../text-splitters" },
    { name = "mypy", specifier = ">=1.10,<1.11" },
    { name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
    { name = "mypy", specifier = ">=1.15,<1.16" },
    { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
    { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
]

[[package]]
name = "langchain-tests"
version = "0.3.16"
version = "0.3.18"
source = { editable = "../../standard-tests" }
dependencies = [
    { name = "httpx" },
@@ -560,7 +562,8 @@ dependencies = [
requires-dist = [
    { name = "httpx", specifier = ">=0.25.0,<1" },
    { name = "langchain-core", editable = "../../core" },
    { name = "numpy", specifier = ">=1.26.2,<3" },
    { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.2" },
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
    { name = "pytest", specifier = ">=7,<9" },
    { name = "pytest-asyncio", specifier = ">=0.20,<1" },
    { name = "pytest-socket", specifier = ">=0.6.0,<1" },
@@ -698,7 +701,8 @@ name = "numpy"
version = "2.2.2"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
    "python_full_version >= '3.12.4'",
    "python_full_version >= '3.13'",
    "python_full_version >= '3.12.4' and python_full_version < '3.13'",
    "python_full_version >= '3.12' and python_full_version < '3.12.4'",
]
sdist = { url = "https://files.pythonhosted.org/packages/ec/d0/c12ddfd3a02274be06ffc71f3efc6d0e457b0409c4481596881e748cb264/numpy-2.2.2.tar.gz", hash = "sha256:ed6906f61834d687738d25988ae117683705636936cc605be0bb208b23df4d8f", size = 20233295 }
@@ -7,13 +7,13 @@ authors = []
license = { text = "MIT" }
requires-python = "<4,>=3.9"
dependencies = [
    "langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.2,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43",
    "langchain-core>=0.3.52",
    "numpy>=1.26.0; python_version < '3.13'",
    "numpy>=2.1.0; python_version >= '3.13'",
    "chromadb!=0.5.10,!=0.5.11,!=0.5.12,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,<0.7.0,>=0.4.0",
]
name = "langchain-chroma"
version = "0.2.2"
version = "0.2.3"
description = "An integration package connecting Chroma and LangChain"
readme = "README.md"
@@ -736,7 +736,7 @@ wheels = [

[[package]]
name = "langchain-chroma"
version = "0.2.2"
version = "0.2.3"
source = { editable = "." }
dependencies = [
    { name = "chromadb" },
@@ -814,7 +814,7 @@ typing = [

[[package]]
name = "langchain-core"
version = "0.3.51"
version = "0.3.52"
source = { editable = "../../core" }
dependencies = [
    { name = "jsonpatch" },
@@ -854,6 +854,8 @@ test = [
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
    { name = "pytest", specifier = ">=8,<9" },
    { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
    { name = "pytest-benchmark" },
    { name = "pytest-codspeed" },
    { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
    { name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
    { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
@@ -864,15 +866,14 @@ test = [
test-integration = []
typing = [
    { name = "langchain-text-splitters", directory = "../../text-splitters" },
    { name = "mypy", specifier = ">=1.10,<1.11" },
    { name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
    { name = "mypy", specifier = ">=1.15,<1.16" },
    { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
    { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
]

[[package]]
name = "langchain-tests"
version = "0.3.17"
version = "0.3.18"
source = { editable = "../../standard-tests" }
dependencies = [
    { name = "httpx" },
@@ -30,6 +30,7 @@ from langchain_core.messages import (
    SystemMessage,
    ToolCall,
    ToolMessage,
    is_data_content_block,
)
from langchain_core.messages.ai import UsageMetadata
from langchain_core.messages.tool import tool_call
@@ -173,6 +174,20 @@ def _lc_tool_call_to_openai_tool_call(tool_call: ToolCall) -> dict:
    }


def _get_image_from_data_content_block(block: dict) -> str:
    """Format standard data content block to format expected by Ollama."""
    if block["type"] == "image":
        if block["source_type"] == "base64":
            return block["data"]
        else:
            error_message = "Image data only supported through in-line base64 format."
            raise ValueError(error_message)

    else:
        error_message = f"Blocks of type {block['type']} not supported."
        raise ValueError(error_message)


def _is_pydantic_class(obj: Any) -> bool:
    return isinstance(obj, type) and is_basemodel_subclass(obj)
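For context, `_get_image_from_data_content_block` only accepts the standard image block in in-line base64 form and returns the raw base64 payload that Ollama expects. A hedged sketch of what such an input could look like once this lands (model name and image file are placeholders; running it requires a local Ollama server):

import base64

from langchain_core.messages import HumanMessage
from langchain_ollama import ChatOllama

image_b64 = base64.b64encode(open("photo.png", "rb").read()).decode()
msg = HumanMessage(
    content=[
        {"type": "text", "text": "Describe this image."},
        {"type": "image", "source_type": "base64", "data": image_b64},
    ]
)
llm = ChatOllama(model="gemma3:4b")  # an image-capable model, as used in the new test
# response = llm.invoke([msg])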
@@ -553,7 +568,9 @@ class ChatOllama(BaseChatModel):
                        images.append(image_url_components[1])
                    else:
                        images.append(image_url_components[0])

                elif is_data_content_block(content_part):
                    image = _get_image_from_data_content_block(content_part)
                    images.append(image)
                else:
                    raise ValueError(
                        "Unsupported message content type. "
@@ -719,6 +736,11 @@ class ChatOllama(BaseChatModel):
        is_thinking = False
        for stream_resp in self._create_chat_stream(messages, stop, **kwargs):
            if not isinstance(stream_resp, str):
                if stream_resp.get("done") is True:
                    generation_info = dict(stream_resp)
                    _ = generation_info.pop("message", None)
                else:
                    generation_info = None
                chunk = ChatGenerationChunk(
                    message=AIMessageChunk(
                        content=(
@@ -732,11 +754,7 @@ class ChatOllama(BaseChatModel):
                        ),
                        tool_calls=_get_tool_calls_from_response(stream_resp),
                    ),
                    generation_info=(
                        dict(stream_resp).pop("message", None)
                        if stream_resp.get("done") is True
                        else None
                    ),
                    generation_info=generation_info,
                )
                if chunk.generation_info and (
                    model := chunk.generation_info.get("model")
@@ -773,6 +791,11 @@ class ChatOllama(BaseChatModel):
        is_thinking = False
        async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs):
            if not isinstance(stream_resp, str):
                if stream_resp.get("done") is True:
                    generation_info = dict(stream_resp)
                    _ = generation_info.pop("message", None)
                else:
                    generation_info = None
                chunk = ChatGenerationChunk(
                    message=AIMessageChunk(
                        content=(
@@ -786,11 +809,7 @@ class ChatOllama(BaseChatModel):
                        ),
                        tool_calls=_get_tool_calls_from_response(stream_resp),
                    ),
                    generation_info=(
                        dict(stream_resp).pop("message", None)
                        if stream_resp.get("done") is True
                        else None
                    ),
                    generation_info=generation_info,
                )
                if chunk.generation_info and (
                    model := chunk.generation_info.get("model")
@@ -14,10 +14,6 @@ class TestChatOllama(ChatModelIntegrationTests):
    def chat_model_params(self) -> dict:
        return {"model": "llama3.1"}

    @property
    def supports_image_inputs(self) -> bool:
        return True

    @property
    def supports_json_mode(self) -> bool:
        return True
@@ -25,3 +21,22 @@ class TestChatOllama(ChatModelIntegrationTests):
    @property
    def has_tool_choice(self) -> bool:
        return False


def test_image_model() -> None:
    class ImageModelTests(ChatModelIntegrationTests):
        @property
        def chat_model_class(self) -> type[ChatOllama]:
            return ChatOllama

        @property
        def chat_model_params(self) -> dict:
            return {"model": "gemma3:4b"}

        @property
        def supports_image_inputs(self) -> bool:
            return True

    test_instance = ImageModelTests()
    model = test_instance.chat_model_class(**test_instance.chat_model_params)
    ImageModelTests().test_image_inputs(model)
@@ -7,12 +7,12 @@ authors = []
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
    "langchain-core<1.0.0,>=0.3.49",
    "langchain-core<1.0.0,>=0.3.52",
    "openai<2.0.0,>=1.68.2",
    "tiktoken<1,>=0.7",
]
name = "langchain-openai"
version = "0.3.12"
version = "0.3.13"
description = "An integration package connecting OpenAI and LangChain"
readme = "README.md"
@@ -1,5 +1,4 @@
version = 1
revision = 1
requires-python = ">=3.9, <4.0"
resolution-markers = [
    "python_full_version >= '3.13'",
@@ -464,7 +463,7 @@ wheels = [

[[package]]
name = "langchain-core"
version = "0.3.51"
version = "0.3.52"
source = { editable = "../../core" }
dependencies = [
    { name = "jsonpatch" },
@@ -504,6 +503,8 @@ test = [
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
    { name = "pytest", specifier = ">=8,<9" },
    { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
    { name = "pytest-benchmark" },
    { name = "pytest-codspeed" },
    { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
    { name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
    { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
@@ -514,15 +515,14 @@ test = [
test-integration = []
typing = [
    { name = "langchain-text-splitters", directory = "../../text-splitters" },
    { name = "mypy", specifier = ">=1.10,<1.11" },
    { name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
    { name = "mypy", specifier = ">=1.15,<1.16" },
    { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
    { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
]

[[package]]
name = "langchain-openai"
version = "0.3.12"
version = "0.3.13"
source = { editable = "." }
dependencies = [
    { name = "langchain-core" },
@@ -609,7 +609,7 @@ typing = [

[[package]]
name = "langchain-tests"
version = "0.3.17"
version = "0.3.18"
source = { editable = "../../standard-tests" }
dependencies = [
    { name = "httpx" },
@@ -7,11 +7,11 @@ authors = []
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
    "langchain-core<1.0.0,>=0.3.50",
    "langchain-core<1.0.0,>=0.3.52",
    "openai<2.0.0,>=1.68.2",
]
name = "langchain-perplexity"
version = "0.1.0"
version = "0.1.1"
description = "An integration package connecting Perplexity and LangChain"
readme = "README.md"
@@ -1,8 +1,8 @@
version = 1
revision = 1
requires-python = ">=3.9, <4.0"
resolution-markers = [
    "python_full_version >= '3.12.4'",
    "python_full_version >= '3.13'",
    "python_full_version >= '3.12.4' and python_full_version < '3.13'",
    "python_full_version >= '3.12' and python_full_version < '3.12.4'",
    "python_full_version < '3.12'",
]
@@ -464,7 +464,7 @@ wheels = [

[[package]]
name = "langchain-core"
version = "0.3.50"
version = "0.3.52"
source = { editable = "../../core" }
dependencies = [
    { name = "jsonpatch" },
@@ -500,10 +500,12 @@ test = [
    { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
    { name = "grandalf", specifier = ">=0.8,<1.0" },
    { name = "langchain-tests", directory = "../../standard-tests" },
    { name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.24.0,<2.0.0" },
    { name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0,<3" },
    { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" },
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
    { name = "pytest", specifier = ">=8,<9" },
    { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
    { name = "pytest-benchmark" },
    { name = "pytest-codspeed" },
    { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
    { name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
    { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
@@ -514,15 +516,14 @@ test = [
test-integration = []
typing = [
    { name = "langchain-text-splitters", directory = "../../text-splitters" },
    { name = "mypy", specifier = ">=1.10,<1.11" },
    { name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
    { name = "mypy", specifier = ">=1.15,<1.16" },
    { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
    { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
]

[[package]]
name = "langchain-perplexity"
version = "0.1.0"
version = "0.1.1"
source = { editable = "." }
dependencies = [
    { name = "langchain-core" },
@@ -599,12 +600,13 @@ typing = [

[[package]]
name = "langchain-tests"
version = "0.3.17"
version = "0.3.18"
source = { editable = "../../standard-tests" }
dependencies = [
    { name = "httpx" },
    { name = "langchain-core" },
    { name = "numpy" },
    { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.13'" },
    { name = "numpy", version = "2.2.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.13'" },
    { name = "pytest" },
    { name = "pytest-asyncio" },
    { name = "pytest-socket" },
@@ -615,7 +617,8 @@ dependencies = [
requires-dist = [
    { name = "httpx", specifier = ">=0.25.0,<1" },
    { name = "langchain-core", editable = "../../core" },
    { name = "numpy", specifier = ">=1.26.2,<3" },
    { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.2" },
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
    { name = "pytest", specifier = ">=7,<9" },
    { name = "pytest-asyncio", specifier = ">=0.20,<1" },
    { name = "pytest-socket", specifier = ">=0.6.0,<1" },
@@ -707,6 +710,11 @@ wheels = [
name = "numpy"
version = "1.26.4"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
    "python_full_version >= '3.12.4' and python_full_version < '3.13'",
    "python_full_version >= '3.12' and python_full_version < '3.12.4'",
    "python_full_version < '3.12'",
]
sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 },
@ -746,6 +754,71 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/5f/fafd8c51235f60d49f7a88e2275e13971e90555b67da52dd6416caec32fe/numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0", size = 15709730 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.2.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.13'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e1/78/31103410a57bc2c2b93a3597340a8119588571f6a4539067546cb9a0bfac/numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f", size = 20270701 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/89/a79e86e5c1433926ed7d60cb267fb64aa578b6101ab645800fd43b4801de/numpy-2.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8146f3550d627252269ac42ae660281d673eb6f8b32f113538e0cc2a9aed42b9", size = 21250661 },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/c2/f50921beb8afd60ed9589ad880332cfefdb805422210d327fb48f12b7a81/numpy-2.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e642d86b8f956098b564a45e6f6ce68a22c2c97a04f5acd3f221f57b8cb850ae", size = 14389926 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/b9/2c4e96130b0b0f97b0ef4a06d6dae3b39d058b21a5e2fa2decd7fd6b1c8f/numpy-2.2.4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:a84eda42bd12edc36eb5b53bbcc9b406820d3353f1994b6cfe453a33ff101775", size = 5428329 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/a5/3d7094aa898f4fc5c84cdfb26beeae780352d43f5d8bdec966c4393d644c/numpy-2.2.4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:4ba5054787e89c59c593a4169830ab362ac2bee8a969249dc56e5d7d20ff8df9", size = 6963559 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/22/fb1be710a14434c09080dd4a0acc08939f612ec02efcb04b9e210474782d/numpy-2.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7716e4a9b7af82c06a2543c53ca476fa0b57e4d760481273e09da04b74ee6ee2", size = 14368066 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/07/2e5cc71193e3ef3a219ffcf6ca4858e46ea2be09c026ddd480d596b32867/numpy-2.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf8c1d66f432ce577d0197dceaac2ac00c0759f573f28516246351c58a85020", size = 16417040 },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/97/3b1537776ad9a6d1a41813818343745e8dd928a2916d4c9edcd9a8af1dac/numpy-2.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:218f061d2faa73621fa23d6359442b0fc658d5b9a70801373625d958259eaca3", size = 15879862 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b7/4472f603dd45ef36ff3d8e84e84fe02d9467c78f92cc121633dce6da307b/numpy-2.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df2f57871a96bbc1b69733cd4c51dc33bea66146b8c63cacbfed73eec0883017", size = 18206032 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/bd/6a092963fb82e6c5aa0d0440635827bbb2910da229545473bbb58c537ed3/numpy-2.2.4-cp310-cp310-win32.whl", hash = "sha256:a0258ad1f44f138b791327961caedffbf9612bfa504ab9597157806faa95194a", size = 6608517 },
|
||||
{ url = "https://files.pythonhosted.org/packages/01/e3/cb04627bc2a1638948bc13e818df26495aa18e20d5be1ed95ab2b10b6847/numpy-2.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:0d54974f9cf14acf49c60f0f7f4084b6579d24d439453d5fc5805d46a165b542", size = 12943498 },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/fb/09e778ee3a8ea0d4dc8329cca0a9c9e65fed847d08e37eba74cb7ed4b252/numpy-2.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9e0a277bb2eb5d8a7407e14688b85fd8ad628ee4e0c7930415687b6564207a4", size = 21254989 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/0a/1212befdbecab5d80eca3cde47d304cad986ad4eec7d85a42e0b6d2cc2ef/numpy-2.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eeea959168ea555e556b8188da5fa7831e21d91ce031e95ce23747b7609f8a4", size = 14425910 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/3e/e7247c1d4f15086bb106c8d43c925b0b2ea20270224f5186fa48d4fb5cbd/numpy-2.2.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bd3ad3b0a40e713fc68f99ecfd07124195333f1e689387c180813f0e94309d6f", size = 5426490 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/fa/aa7cd6be51419b894c5787a8a93c3302a1ed4f82d35beb0613ec15bdd0e2/numpy-2.2.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cf28633d64294969c019c6df4ff37f5698e8326db68cc2b66576a51fad634880", size = 6967754 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/ee/96457c943265de9fadeb3d2ffdbab003f7fba13d971084a9876affcda095/numpy-2.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fa8fa7697ad1646b5c93de1719965844e004fcad23c91228aca1cf0800044a1", size = 14373079 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/5c/ceefca458559f0ccc7a982319f37ed07b0d7b526964ae6cc61f8ad1b6119/numpy-2.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4162988a360a29af158aeb4a2f4f09ffed6a969c9776f8f3bdee9b06a8ab7e5", size = 16428819 },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/31/9b2ac8eee99e001eb6add9fa27514ef5e9faf176169057a12860af52704c/numpy-2.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:892c10d6a73e0f14935c31229e03325a7b3093fafd6ce0af704be7f894d95687", size = 15881470 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/dc/8569b5f25ff30484b555ad8a3f537e0225d091abec386c9420cf5f7a2976/numpy-2.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db1f1c22173ac1c58db249ae48aa7ead29f534b9a948bc56828337aa84a32ed6", size = 18218144 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/05/463c023a39bdeb9bb43a99e7dee2c664cb68d5bb87d14f92482b9f6011cc/numpy-2.2.4-cp311-cp311-win32.whl", hash = "sha256:ea2bb7e2ae9e37d96835b3576a4fa4b3a97592fbea8ef7c3587078b0068b8f09", size = 6606368 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/72/10c1d2d82101c468a28adc35de6c77b308f288cfd0b88e1070f15b98e00c/numpy-2.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:f7de08cbe5551911886d1ab60de58448c6df0f67d9feb7d1fb21e9875ef95e91", size = 12947526 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/30/182db21d4f2a95904cec1a6f779479ea1ac07c0647f064dea454ec650c42/numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4", size = 20947156 },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/6d/9483566acfbda6c62c6bc74b6e981c777229d2af93c8eb2469b26ac1b7bc/numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854", size = 14133092 },
{ url = "https://files.pythonhosted.org/packages/27/f6/dba8a258acbf9d2bed2525cdcbb9493ef9bae5199d7a9cb92ee7e9b2aea6/numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24", size = 5163515 },
{ url = "https://files.pythonhosted.org/packages/62/30/82116199d1c249446723c68f2c9da40d7f062551036f50b8c4caa42ae252/numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee", size = 6696558 },
{ url = "https://files.pythonhosted.org/packages/0e/b2/54122b3c6df5df3e87582b2e9430f1bdb63af4023c739ba300164c9ae503/numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba", size = 14084742 },
{ url = "https://files.pythonhosted.org/packages/02/e2/e2cbb8d634151aab9528ef7b8bab52ee4ab10e076509285602c2a3a686e0/numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592", size = 16134051 },
{ url = "https://files.pythonhosted.org/packages/8e/21/efd47800e4affc993e8be50c1b768de038363dd88865920439ef7b422c60/numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb", size = 15578972 },
{ url = "https://files.pythonhosted.org/packages/04/1e/f8bb88f6157045dd5d9b27ccf433d016981032690969aa5c19e332b138c0/numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f", size = 17898106 },
{ url = "https://files.pythonhosted.org/packages/2b/93/df59a5a3897c1f036ae8ff845e45f4081bb06943039ae28a3c1c7c780f22/numpy-2.2.4-cp312-cp312-win32.whl", hash = "sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00", size = 6311190 },
{ url = "https://files.pythonhosted.org/packages/46/69/8c4f928741c2a8efa255fdc7e9097527c6dc4e4df147e3cadc5d9357ce85/numpy-2.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146", size = 12644305 },
{ url = "https://files.pythonhosted.org/packages/2a/d0/bd5ad792e78017f5decfb2ecc947422a3669a34f775679a76317af671ffc/numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7", size = 20933623 },
{ url = "https://files.pythonhosted.org/packages/c3/bc/2b3545766337b95409868f8e62053135bdc7fa2ce630aba983a2aa60b559/numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0", size = 14148681 },
{ url = "https://files.pythonhosted.org/packages/6a/70/67b24d68a56551d43a6ec9fe8c5f91b526d4c1a46a6387b956bf2d64744e/numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392", size = 5148759 },
{ url = "https://files.pythonhosted.org/packages/1c/8b/e2fc8a75fcb7be12d90b31477c9356c0cbb44abce7ffb36be39a0017afad/numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc", size = 6683092 },
{ url = "https://files.pythonhosted.org/packages/13/73/41b7b27f169ecf368b52533edb72e56a133f9e86256e809e169362553b49/numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298", size = 14081422 },
{ url = "https://files.pythonhosted.org/packages/4b/04/e208ff3ae3ddfbafc05910f89546382f15a3f10186b1f56bd99f159689c2/numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7", size = 16132202 },
{ url = "https://files.pythonhosted.org/packages/fe/bc/2218160574d862d5e55f803d88ddcad88beff94791f9c5f86d67bd8fbf1c/numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6", size = 15573131 },
{ url = "https://files.pythonhosted.org/packages/a5/78/97c775bc4f05abc8a8426436b7cb1be806a02a2994b195945600855e3a25/numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd", size = 17894270 },
{ url = "https://files.pythonhosted.org/packages/b9/eb/38c06217a5f6de27dcb41524ca95a44e395e6a1decdc0c99fec0832ce6ae/numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c", size = 6308141 },
{ url = "https://files.pythonhosted.org/packages/52/17/d0dd10ab6d125c6d11ffb6dfa3423c3571befab8358d4f85cd4471964fcd/numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3", size = 12636885 },
{ url = "https://files.pythonhosted.org/packages/fa/e2/793288ede17a0fdc921172916efb40f3cbc2aa97e76c5c84aba6dc7e8747/numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8", size = 20961829 },
{ url = "https://files.pythonhosted.org/packages/3a/75/bb4573f6c462afd1ea5cbedcc362fe3e9bdbcc57aefd37c681be1155fbaa/numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39", size = 14161419 },
{ url = "https://files.pythonhosted.org/packages/03/68/07b4cd01090ca46c7a336958b413cdbe75002286295f2addea767b7f16c9/numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd", size = 5196414 },
{ url = "https://files.pythonhosted.org/packages/a5/fd/d4a29478d622fedff5c4b4b4cedfc37a00691079623c0575978d2446db9e/numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0", size = 6709379 },
{ url = "https://files.pythonhosted.org/packages/41/78/96dddb75bb9be730b87c72f30ffdd62611aba234e4e460576a068c98eff6/numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960", size = 14051725 },
{ url = "https://files.pythonhosted.org/packages/00/06/5306b8199bffac2a29d9119c11f457f6c7d41115a335b78d3f86fad4dbe8/numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8", size = 16101638 },
{ url = "https://files.pythonhosted.org/packages/fa/03/74c5b631ee1ded596945c12027649e6344614144369fd3ec1aaced782882/numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc", size = 15571717 },
{ url = "https://files.pythonhosted.org/packages/cb/dc/4fc7c0283abe0981e3b89f9b332a134e237dd476b0c018e1e21083310c31/numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff", size = 17879998 },
{ url = "https://files.pythonhosted.org/packages/e5/2b/878576190c5cfa29ed896b518cc516aecc7c98a919e20706c12480465f43/numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286", size = 6366896 },
{ url = "https://files.pythonhosted.org/packages/3e/05/eb7eec66b95cf697f08c754ef26c3549d03ebd682819f794cb039574a0a6/numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d", size = 12739119 },
{ url = "https://files.pythonhosted.org/packages/b2/5c/f09c33a511aff41a098e6ef3498465d95f6360621034a3d95f47edbc9119/numpy-2.2.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7051ee569db5fbac144335e0f3b9c2337e0c8d5c9fee015f259a5bd70772b7e8", size = 21081956 },
{ url = "https://files.pythonhosted.org/packages/ba/30/74c48b3b6494c4b820b7fa1781d441e94d87a08daa5b35d222f06ba41a6f/numpy-2.2.4-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ab2939cd5bec30a7430cbdb2287b63151b77cf9624de0532d629c9a1c59b1d5c", size = 6827143 },
{ url = "https://files.pythonhosted.org/packages/54/f5/ab0d2f48b490535c7a80e05da4a98902b632369efc04f0e47bb31ca97d8f/numpy-2.2.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f35b19894a9e08639fd60a1ec1978cb7f5f7f1eace62f38dd36be8aecdef4d", size = 16233350 },
{ url = "https://files.pythonhosted.org/packages/3b/3a/2f6d8c1f8e45d496bca6baaec93208035faeb40d5735c25afac092ec9a12/numpy-2.2.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b4adfbbc64014976d2f91084915ca4e626fbf2057fb81af209c1a6d776d23e3d", size = 12857565 },
]
[[package]]
name = "openai"
version = "1.70.0"
@ -7,7 +7,7 @@ authors = [{ name = "Erick Friis", email = "erick@langchain.dev" }]
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
"langchain-core<1.0.0,>=0.3.49",
"langchain-core<1.0.0,>=0.3.52",
"pytest<9,>=7",
"pytest-asyncio<1,>=0.20",
"httpx<1,>=0.25.0",
@ -17,7 +17,7 @@ dependencies = [
"numpy>=2.1.0; python_version>='3.13'",
]
name = "langchain-tests"
version = "0.3.17"
version = "0.3.18"
description = "Standard tests for LangChain implementations"
readme = "README.md"
@ -1,5 +1,4 @@
version = 1
revision = 1
requires-python = ">=3.9, <4.0"
resolution-markers = [
"python_full_version >= '3.13'",
@ -290,7 +289,7 @@ wheels = [
[[package]]
name = "langchain-core"
version = "0.3.51"
version = "0.3.52"
source = { editable = "../core" }
dependencies = [
{ name = "jsonpatch" },
@ -330,6 +329,8 @@ test = [
{ name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
{ name = "pytest", specifier = ">=8,<9" },
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
{ name = "pytest-benchmark" },
{ name = "pytest-codspeed" },
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
{ name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
@ -340,15 +341,14 @@ test = [
test-integration = []
typing = [
{ name = "langchain-text-splitters", directory = "../text-splitters" },
{ name = "mypy", specifier = ">=1.10,<1.11" },
{ name = "types-jinja2", specifier = ">=2.11.9,<3.0.0" },
{ name = "mypy", specifier = ">=1.15,<1.16" },
{ name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
{ name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
]
[[package]]
name = "langchain-tests"
version = "0.3.17"
version = "0.3.18"
source = { editable = "." }
dependencies = [
{ name = "httpx" },