diff --git a/libs/core/uv.lock b/libs/core/uv.lock
index f7d4152c000..70ddba6fddf 100644
--- a/libs/core/uv.lock
+++ b/libs/core/uv.lock
@@ -1170,7 +1170,7 @@ requires-dist = [
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
-lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
+lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }]
 test = [{ name = "langchain-core", editable = "." }]
 test-integration = []
 typing = [
@@ -1184,10 +1184,14 @@ version = "0.3.9"
 source = { directory = "../text-splitters" }
 dependencies = [
     { name = "langchain-core" },
+    { name = "pip" },
 ]
 
 [package.metadata]
-requires-dist = [{ name = "langchain-core", editable = "." }]
+requires-dist = [
+    { name = "langchain-core", editable = "." },
+    { name = "pip", specifier = ">=25.2" },
+]
 
 [package.metadata.requires-dev]
 dev = [
@@ -1196,7 +1200,7 @@ dev = [
 ]
 lint = [
     { name = "langchain-core", editable = "." },
-    { name = "ruff", specifier = ">=0.12.2,<0.13" },
+    { name = "ruff", specifier = ">=0.12.8,<0.13" },
 ]
 test = [
     { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
@@ -1213,6 +1217,7 @@ test-integration = [
     { name = "sentence-transformers", specifier = ">=3.0.1" },
     { name = "spacy", specifier = ">=3.8.7,<4.0.0" },
     { name = "thinc", specifier = ">=8.3.6,<9.0.0" },
+    { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
     { name = "transformers", specifier = ">=4.51.3,<5.0.0" },
 ]
 typing = [
@@ -1993,6 +1998,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
 ]
 
+[[package]]
+name = "pip"
+version = "25.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/16/650289cd3f43d5a2fadfd98c68bd1e1e7f2550a1a5326768cddfbcedb2c5/pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2", size = 1840021, upload-time = "2025-07-30T21:50:15.401Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa/pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717", size = 1752557, upload-time = "2025-07-30T21:50:13.323Z" },
+]
+
 [[package]]
 name = "platformdirs"
 version = "4.3.8"
diff --git a/libs/langchain/langchain/evaluation/embedding_distance/base.py b/libs/langchain/langchain/evaluation/embedding_distance/base.py
index bf65305114b..bf9e0a798a0 100644
--- a/libs/langchain/langchain/evaluation/embedding_distance/base.py
+++ b/libs/langchain/langchain/evaluation/embedding_distance/base.py
@@ -201,15 +201,41 @@ class _EmbeddingDistanceChainMixin(Chain):
             np.ndarray: The cosine distance.
         """
         try:
-            from langchain_community.utils.math import cosine_similarity
-        except ImportError as e:
-            msg = (
-                "The cosine_similarity function is required to compute cosine distance."
-                " Please install the langchain-community package using"
-                " `pip install langchain-community`."
-            )
-            raise ImportError(msg) from e
-        return 1.0 - cosine_similarity(a, b)
+            from langchain_core.vectorstores.utils import _cosine_similarity
+
+            return 1.0 - _cosine_similarity(a, b)
+        except ImportError:
+            # Fallback to scipy if available
+            try:
+                from scipy.spatial.distance import cosine
+
+                return cosine(a.flatten(), b.flatten())
+            except ImportError:
+                # Pure numpy fallback
+                if _check_numpy():
+                    np = _import_numpy()
+                    a_flat = a.flatten()
+                    b_flat = b.flatten()
+                    dot_product = np.dot(a_flat, b_flat)
+                    norm_a = np.linalg.norm(a_flat)
+                    norm_b = np.linalg.norm(b_flat)
+                    if norm_a == 0 or norm_b == 0:
+                        return 0.0
+                    return 1.0 - (dot_product / (norm_a * norm_b))
+                # Pure Python implementation
+                a_flat = a if hasattr(a, "__len__") else [a]
+                b_flat = b if hasattr(b, "__len__") else [b]
+                if hasattr(a, "flatten"):
+                    a_flat = a.flatten()
+                if hasattr(b, "flatten"):
+                    b_flat = b.flatten()
+
+                dot_product = sum(x * y for x, y in zip(a_flat, b_flat))
+                norm_a = sum(x * x for x in a_flat) ** 0.5
+                norm_b = sum(x * x for x in b_flat) ** 0.5
+                if norm_a == 0 or norm_b == 0:
+                    return 0.0
+                return 1.0 - (dot_product / (norm_a * norm_b))
 
     @staticmethod
     def _euclidean_distance(a: Any, b: Any) -> Any:
@@ -222,12 +248,17 @@ class _EmbeddingDistanceChainMixin(Chain):
         Returns:
             np.floating: The Euclidean distance.
         """
-        if _check_numpy():
-            import numpy as np
+        try:
+            from scipy.spatial.distance import euclidean
 
-            return np.linalg.norm(a - b)
+            return euclidean(a.flatten(), b.flatten())
+        except ImportError:
+            if _check_numpy():
+                import numpy as np
 
-        return sum((x - y) * (x - y) for x, y in zip(a, b)) ** 0.5
+                return np.linalg.norm(a - b)
+
+            return sum((x - y) * (x - y) for x, y in zip(a, b)) ** 0.5
 
     @staticmethod
     def _manhattan_distance(a: Any, b: Any) -> Any:
@@ -240,11 +271,16 @@ class _EmbeddingDistanceChainMixin(Chain):
         Returns:
             np.floating: The Manhattan distance.
         """
-        if _check_numpy():
-            np = _import_numpy()
-            return np.sum(np.abs(a - b))
+        try:
+            from scipy.spatial.distance import cityblock
 
-        return sum(abs(x - y) for x, y in zip(a, b))
+            return cityblock(a.flatten(), b.flatten())
+        except ImportError:
+            if _check_numpy():
+                np = _import_numpy()
+                return np.sum(np.abs(a - b))
+
+            return sum(abs(x - y) for x, y in zip(a, b))
 
     @staticmethod
     def _chebyshev_distance(a: Any, b: Any) -> Any:
@@ -257,11 +293,16 @@ class _EmbeddingDistanceChainMixin(Chain):
         Returns:
             np.floating: The Chebyshev distance.
         """
-        if _check_numpy():
-            np = _import_numpy()
-            return np.max(np.abs(a - b))
+        try:
+            from scipy.spatial.distance import chebyshev
 
-        return max(abs(x - y) for x, y in zip(a, b))
+            return chebyshev(a.flatten(), b.flatten())
+        except ImportError:
+            if _check_numpy():
+                np = _import_numpy()
+                return np.max(np.abs(a - b))
+
+            return max(abs(x - y) for x, y in zip(a, b))
 
     @staticmethod
     def _hamming_distance(a: Any, b: Any) -> Any:
@@ -274,11 +315,16 @@ class _EmbeddingDistanceChainMixin(Chain):
         Returns:
             np.floating: The Hamming distance.
         """
-        if _check_numpy():
-            np = _import_numpy()
-            return np.mean(a != b)
+        try:
+            from scipy.spatial.distance import hamming
 
-        return sum(1 for x, y in zip(a, b) if x != y) / len(a)
+            return hamming(a.flatten(), b.flatten())
+        except ImportError:
+            if _check_numpy():
+                np = _import_numpy()
+                return np.mean(a != b)
+
+            return sum(1 for x, y in zip(a, b) if x != y) / len(a)
 
     def _compute_score(self, vectors: Any) -> float:
         """Compute the score based on the distance metric.
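Note (not part of the patch): the cosine hunk above first tries langchain-core's private `_cosine_similarity` helper and then layers scipy, numpy, and pure-Python fallbacks. A rough standalone sketch of that fallback order for plain 1-D vectors, with hypothetical names and the langchain-core branch omitted:

# Illustrative sketch only: mirrors the fallback chain of the patched
# _cosine_distance (scipy, then numpy, then pure Python) for 1-D sequences.
from typing import Any

def cosine_distance(a: Any, b: Any) -> float:
    try:
        # scipy's `cosine` already returns a distance (1 - cosine similarity).
        from scipy.spatial.distance import cosine

        return float(cosine(a, b))
    except ImportError:
        pass
    try:
        import numpy as np

        a_arr = np.asarray(a, dtype=float).flatten()
        b_arr = np.asarray(b, dtype=float).flatten()
        denom = float(np.linalg.norm(a_arr) * np.linalg.norm(b_arr))
        return 0.0 if denom == 0 else float(1.0 - np.dot(a_arr, b_arr) / denom)
    except ImportError:
        pass
    # Pure-Python last resort, as in the patch's final branch.
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = sum(x * x for x in a) ** 0.5
    norm_b = sum(x * x for x in b) ** 0.5
    return 0.0 if norm_a == 0 or norm_b == 0 else 1.0 - dot / (norm_a * norm_b)

if __name__ == "__main__":
    print(cosine_distance([1.0, 0.0], [0.0, 1.0]))  # ~1.0 for orthogonal vectors

Either way, the metric keeps working when langchain-community, scipy, and numpy are all unavailable, which is the point of the change.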
diff --git a/libs/langchain/uv.lock b/libs/langchain/uv.lock
index 47e886a827d..4865dd818be 100644
--- a/libs/langchain/uv.lock
+++ b/libs/langchain/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.9, <4.0"
 resolution-markers = [
     "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'",
@@ -2241,7 +2241,7 @@ test = [
 test-integration = []
 typing = [
     { name = "langchain-text-splitters", directory = "../text-splitters" },
-    { name = "mypy", specifier = ">=1.15,<1.16" },
+    { name = "mypy", specifier = ">=1.17.1,<1.18" },
     { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
     { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
 ]
@@ -2445,7 +2445,7 @@ requires-dist = [
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
-lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
+lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }]
 test = [{ name = "langchain-core", editable = "../core" }]
 test-integration = []
 typing = [
@@ -2459,10 +2459,14 @@ version = "0.3.9"
 source = { editable = "../text-splitters" }
 dependencies = [
     { name = "langchain-core" },
+    { name = "pip" },
 ]
 
 [package.metadata]
-requires-dist = [{ name = "langchain-core", editable = "../core" }]
+requires-dist = [
+    { name = "langchain-core", editable = "../core" },
+    { name = "pip", specifier = ">=25.2" },
+]
 
 [package.metadata.requires-dev]
 dev = [
@@ -2471,7 +2475,7 @@ dev = [
 ]
 lint = [
     { name = "langchain-core", editable = "../core" },
-    { name = "ruff", specifier = ">=0.12.2,<0.13" },
+    { name = "ruff", specifier = ">=0.12.8,<0.13" },
 ]
 test = [
     { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
@@ -2488,6 +2492,7 @@ test-integration = [
     { name = "sentence-transformers", specifier = ">=3.0.1" },
     { name = "spacy", specifier = ">=3.8.7,<4.0.0" },
     { name = "thinc", specifier = ">=8.3.6,<9.0.0" },
+    { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
     { name = "transformers", specifier = ">=4.51.3,<5.0.0" },
 ]
 typing = [
@@ -3333,6 +3338,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/41/67/936f9814bdd74b2dfd4822f1f7725ab5d8ff4103919a1664eb4874c58b2f/pillow-11.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0", size = 2626353, upload-time = "2025-01-02T08:13:52.725Z" },
 ]
 
+[[package]]
+name = "pip"
+version = "25.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/16/650289cd3f43d5a2fadfd98c68bd1e1e7f2550a1a5326768cddfbcedb2c5/pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2", size = 1840021, upload-time = "2025-07-30T21:50:15.401Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa/pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717", size = 1752557, upload-time = "2025-07-30T21:50:13.323Z" },
+]
+
 [[package]]
 name = "platformdirs"
 version = "4.3.6"
diff --git a/libs/partners/anthropic/pyproject.toml b/libs/partners/anthropic/pyproject.toml
index 61a256e3a38..3ccc65bb7c5 100644
--- a/libs/partners/anthropic/pyproject.toml
+++ b/libs/partners/anthropic/pyproject.toml
@@ -105,6 +105,8 @@ ignore = [
     "D105", # pydocstyle: Missing docstring in magic method
     "D107", # pydocstyle: Missing docstring in __init__
     "D407", # pydocstyle: Missing-dashed-underline-after-section
+    "D203",
+ "D213", "D214", # Section over-indented, doesn't play well with reStructuredText "COM812", # Messes with the formatter "ISC001", # Messes with the formatter diff --git a/libs/partners/anthropic/uv.lock b/libs/partners/anthropic/uv.lock index e6d9cab0a66..e1efa8416fc 100644 --- a/libs/partners/anthropic/uv.lock +++ b/libs/partners/anthropic/uv.lock @@ -598,7 +598,7 @@ requires-dist = [ [package.metadata.requires-dev] codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }] -lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }] +lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }] test = [{ name = "langchain-core", editable = "../../core" }] test-integration = [] typing = [ diff --git a/libs/partners/fireworks/pyproject.toml b/libs/partners/fireworks/pyproject.toml index 93dd12fd84b..fad2a199cdb 100644 --- a/libs/partners/fireworks/pyproject.toml +++ b/libs/partners/fireworks/pyproject.toml @@ -95,6 +95,8 @@ ignore = [ "D104", # Missing docstring in public package "D105", # Missing docstring in magic method "D107", # Missing docstring in __init__ + "D203", + "D213", "COM812", # Messes with the formatter "ISC001", # Messes with the formatter "PERF203", # Rarely useful @@ -121,4 +123,4 @@ asyncio_mode = "auto" "tests/**/*.py" = [ "S101", # Tests need assertions "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes -] \ No newline at end of file +] diff --git a/libs/partners/groq/pyproject.toml b/libs/partners/groq/pyproject.toml index 570383ac4cb..46845695580 100644 --- a/libs/partners/groq/pyproject.toml +++ b/libs/partners/groq/pyproject.toml @@ -88,6 +88,8 @@ ignore = [ "D104", # Missing docstring in public package "D105", # Missing docstring in magic method "D107", # Missing docstring in __init__ + "D203", + "D213", "COM812", # Messes with the formatter "ISC001", # Messes with the formatter "PERF203", # Rarely useful diff --git a/libs/partners/groq/uv.lock b/libs/partners/groq/uv.lock index 71a5ef39a3a..d872ffda8ec 100644 --- a/libs/partners/groq/uv.lock +++ b/libs/partners/groq/uv.lock @@ -485,7 +485,7 @@ requires-dist = [ [package.metadata.requires-dev] codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }] -lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }] +lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }] test = [{ name = "langchain-core", editable = "../../core" }] test-integration = [] typing = [ diff --git a/libs/partners/huggingface/pyproject.toml b/libs/partners/huggingface/pyproject.toml index 48ee6132566..13ed737ba80 100644 --- a/libs/partners/huggingface/pyproject.toml +++ b/libs/partners/huggingface/pyproject.toml @@ -102,6 +102,8 @@ ignore = [ "D104", # pydocstyle: Missing docstring in public package "D105", # pydocstyle: Missing docstring in magic method "D107", # pydocstyle: Missing docstring in __init__ + "D203", + "D213", "D407", # pydocstyle: Missing-dashed-underline-after-section "COM812", # Messes with the formatter "ISC001", # Messes with the formatter diff --git a/libs/partners/ollama/Makefile b/libs/partners/ollama/Makefile index 68f307e27e1..de0768ab599 100644 --- a/libs/partners/ollama/Makefile +++ b/libs/partners/ollama/Makefile @@ -8,10 +8,14 @@ UV_FROZEN = true # Define a variable for the test file path. 
 TEST_FILE ?= tests/unit_tests/
-integration_test: TEST_FILE = tests/integration_tests/
+integration_test integration_tests: TEST_FILE = tests/integration_tests/
 # note: leaving out integration_tests (with s) command to skip release testing for now
 # TODO(erick) configure ollama server to run in CI, in separate repo
 
+# Define variables for test model configuration
+OLLAMA_TEST_MODEL ?= llama3.1
+OLLAMA_REASONING_TEST_MODEL ?= deepseek-r1:1.5b
+
 
 # unit tests are run with the --disable-socket flag to prevent network calls
 test tests:
@@ -22,10 +26,9 @@ test_watch:
 
 
 # integration tests are run without the --disable-socket flag to allow network calls
-integration_test:
-	uv run --group test --group test_integration pytest $(TEST_FILE)
+integration_test integration_tests:
+	OLLAMA_TEST_MODEL=$(OLLAMA_TEST_MODEL) OLLAMA_REASONING_TEST_MODEL=$(OLLAMA_REASONING_TEST_MODEL) uv run --group test --group test_integration pytest $(TEST_FILE)
 # note: leaving out integration_tests (with s) command to skip release testing for now
-# TODO(erick) configure ollama server to run in CI, in separate repo
 
 ######################
 # LINTING AND FORMATTING
@@ -70,3 +73,6 @@ help:
 	@echo 'test - run unit tests'
 	@echo 'tests - run unit tests'
 	@echo 'test TEST_FILE= - run all tests in file'
+	@echo 'integration_test - run integration tests'
+	@echo 'integration_test OLLAMA_TEST_MODEL= - run integration tests with specific model'
+	@echo ' Example: make integration_test OLLAMA_TEST_MODEL=llama3.1'
diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py
index 0e9625890a8..aa350643101 100644
--- a/libs/partners/ollama/langchain_ollama/chat_models.py
+++ b/libs/partners/ollama/langchain_ollama/chat_models.py
@@ -145,10 +145,14 @@ def _parse_arguments_from_tool_call(
     """
     if "function" not in raw_tool_call:
         return None
+    function_name = raw_tool_call["function"]["name"]
     arguments = raw_tool_call["function"]["arguments"]
     parsed_arguments: dict = {}
     if isinstance(arguments, dict):
         for key, value in arguments.items():
+            # Filter out metadata fields like 'functionName' that echo function name
+            if key == "functionName" and value == function_name:
+                continue
             if isinstance(value, str):
                 parsed_value = _parse_json_string(
                     value, skip=True, raw_tool_call=raw_tool_call
diff --git a/libs/partners/ollama/pyproject.toml b/libs/partners/ollama/pyproject.toml
index 6a63a4615a1..a4b3128acb1 100644
--- a/libs/partners/ollama/pyproject.toml
+++ b/libs/partners/ollama/pyproject.toml
@@ -125,4 +125,4 @@ asyncio_mode = "auto"
 "tests/**/*.py" = [
     "S101", # Tests need assertions
     "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes
-]
\ No newline at end of file
+]
diff --git a/libs/partners/ollama/tests/integration_tests/test_embeddings.py b/libs/partners/ollama/tests/integration_tests/test_embeddings.py
index bf8e3a04bec..65f242f8bab 100644
--- a/libs/partners/ollama/tests/integration_tests/test_embeddings.py
+++ b/libs/partners/ollama/tests/integration_tests/test_embeddings.py
@@ -1,10 +1,12 @@
 """Test Ollama embeddings."""
 
+import os
+
 from langchain_tests.integration_tests import EmbeddingsIntegrationTests
 
 from langchain_ollama.embeddings import OllamaEmbeddings
 
-MODEL_NAME = "llama3.1"
+MODEL_NAME = os.environ.get("OLLAMA_TEST_MODEL", "llama3.1")
 
 
 class TestOllamaEmbeddings(EmbeddingsIntegrationTests):
diff --git a/libs/partners/ollama/tests/integration_tests/test_llms.py b/libs/partners/ollama/tests/integration_tests/test_llms.py
index 9f2209d4d95..34136f0862d 100644
--- a/libs/partners/ollama/tests/integration_tests/test_llms.py
+++ b/libs/partners/ollama/tests/integration_tests/test_llms.py
@@ -1,13 +1,15 @@
 """Test OllamaLLM llm."""
 
+import os
+
 import pytest
 from langchain_core.outputs import GenerationChunk
 from langchain_core.runnables import RunnableConfig
 
 from langchain_ollama.llms import OllamaLLM
 
-MODEL_NAME = "llama3.1"
-REASONING_MODEL_NAME = "deepseek-r1:1.5b"
+MODEL_NAME = os.environ.get("OLLAMA_TEST_MODEL", "llama3.1")
+REASONING_MODEL_NAME = os.environ.get("OLLAMA_REASONING_TEST_MODEL", "deepseek-r1:1.5b")
 
 SAMPLE = "What is 3^3?"
 
diff --git a/libs/partners/ollama/tests/unit_tests/test_chat_models.py b/libs/partners/ollama/tests/unit_tests/test_chat_models.py
index 3fb742fc4af..5bcb8262908 100644
--- a/libs/partners/ollama/tests/unit_tests/test_chat_models.py
+++ b/libs/partners/ollama/tests/unit_tests/test_chat_models.py
@@ -50,6 +50,41 @@ def test__parse_arguments_from_tool_call() -> None:
     assert isinstance(response["arg_1"], str)
 
 
+def test__parse_arguments_from_tool_call_with_function_name_metadata() -> None:
+    """Test that functionName metadata is filtered out from tool arguments.
+
+    Some models may include metadata like ``functionName`` in the arguments
+    that just echoes the function name. This should be filtered out for
+    no-argument tools to return an empty dictionary.
+    """
+    # Test case where arguments contain functionName metadata
+    raw_tool_call_with_metadata = {
+        "function": {
+            "name": "magic_function_no_args",
+            "arguments": {"functionName": "magic_function_no_args"},
+        }
+    }
+    response = _parse_arguments_from_tool_call(raw_tool_call_with_metadata)
+    assert response == {}
+
+    # Test case where arguments contain both real args and metadata
+    raw_tool_call_mixed = {
+        "function": {
+            "name": "some_function",
+            "arguments": {"functionName": "some_function", "real_arg": "value"},
+        }
+    }
+    response_mixed = _parse_arguments_from_tool_call(raw_tool_call_mixed)
+    assert response_mixed == {"real_arg": "value"}
+
+    # Test case where functionName has different value (should be preserved)
+    raw_tool_call_different = {
+        "function": {"name": "function_a", "arguments": {"functionName": "function_b"}}
+    }
+    response_different = _parse_arguments_from_tool_call(raw_tool_call_different)
+    assert response_different == {"functionName": "function_b"}
+
+
 @contextmanager
 def _mock_httpx_client_stream(
     *args: Any, **kwargs: Any
diff --git a/libs/partners/ollama/uv.lock b/libs/partners/ollama/uv.lock
index aaec31a6c26..022ee022a06 100644
--- a/libs/partners/ollama/uv.lock
+++ b/libs/partners/ollama/uv.lock
@@ -356,7 +356,7 @@ test = [
 test-integration = []
 typing = [
     { name = "langchain-text-splitters", directory = "../../text-splitters" },
-    { name = "mypy", specifier = ">=1.15,<1.16" },
+    { name = "mypy", specifier = ">=1.17.1,<1.18" },
     { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
     { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
 ]
@@ -456,7 +456,7 @@ requires-dist = [
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
-lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
+lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }]
 test = [{ name = "langchain-core", editable = "../../core" }]
 test-integration = []
 typing = [
diff --git a/libs/partners/openai/pyproject.toml b/libs/partners/openai/pyproject.toml
index 825e477b823..b2f010cfc93 100644
--- a/libs/partners/openai/pyproject.toml
+++ b/libs/partners/openai/pyproject.toml
@@ -73,6 +73,9 @@ ignore = [ "UP007", "UP045" ]
 docstring-code-format = true
 skip-magic-trailing-comma = true
 
+[tool.ruff.lint.isort]
+split-on-trailing-comma = false
+
 [tool.coverage.run]
 omit = ["tests/*"]
 
diff --git a/libs/partners/openai/uv.lock b/libs/partners/openai/uv.lock
index d71107230bf..69f78a1ba51 100644
--- a/libs/partners/openai/uv.lock
+++ b/libs/partners/openai/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.9"
 resolution-markers = [
     "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'",
@@ -588,7 +588,7 @@ typing = [
 [package.metadata]
 requires-dist = [
     { name = "langchain-core", editable = "../../core" },
-    { name = "openai", specifier = ">=1.99.3,<2.0.0" },
+    { name = "openai", specifier = ">=1.86.0,<2.0.0" },
     { name = "tiktoken", specifier = ">=0.7,<1" },
 ]
 
@@ -662,7 +662,7 @@ requires-dist = [
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
-lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
+lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }]
 test = [{ name = "langchain-core", editable = "../../core" }]
 test-integration = []
 typing = [
diff --git a/uv.lock b/uv.lock
index 00e1cafc046..28ffabb41d2 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.9"
 resolution-markers = [
     "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'",
@@ -2259,15 +2259,11 @@ dependencies = [
 requires-dist = [
     { name = "async-timeout", marker = "python_full_version < '3.11'", specifier = ">=4.0.0,<5.0.0" },
     { name = "langchain-anthropic", marker = "extra == 'anthropic'" },
-    { name = "langchain-aws", marker = "extra == 'aws'" },
-    { name = "langchain-azure-ai", marker = "extra == 'azure-ai'" },
-    { name = "langchain-cohere", marker = "extra == 'cohere'" },
     { name = "langchain-community", marker = "extra == 'community'" },
     { name = "langchain-core", editable = "libs/core" },
     { name = "langchain-deepseek", marker = "extra == 'deepseek'" },
     { name = "langchain-fireworks", marker = "extra == 'fireworks'" },
     { name = "langchain-google-genai", marker = "extra == 'google-genai'" },
-    { name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" },
     { name = "langchain-groq", marker = "extra == 'groq'" },
     { name = "langchain-huggingface", marker = "extra == 'huggingface'" },
     { name = "langchain-mistralai", marker = "extra == 'mistralai'" },
@@ -2276,14 +2272,13 @@ requires-dist = [
     { name = "langchain-perplexity", marker = "extra == 'perplexity'" },
     { name = "langchain-text-splitters", editable = "libs/text-splitters" },
     { name = "langchain-together", marker = "extra == 'together'" },
-    { name = "langchain-xai", marker = "extra == 'xai'" },
     { name = "langsmith", specifier = ">=0.1.17" },
     { name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
     { name = "pyyaml", specifier = ">=5.3" },
     { name = "requests", specifier = ">=2,<3" },
     { name = "sqlalchemy", specifier = ">=1.4,<3" },
 ]
-provides-extras = ["community", "anthropic", "openai", "azure-ai", "cohere", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"]
+provides-extras = ["community", "anthropic", "openai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "deepseek", "perplexity"]
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
@@ -2483,7 +2478,7 @@ wheels = [
 
 [[package]]
 name = "langchain-core"
-version = "0.3.72"
+version = "0.3.74"
 source = { editable = "libs/core" }
 dependencies = [
     { name = "jsonpatch" },
@@ -2534,7 +2529,7 @@ test = [
 test-integration = []
 typing = [
     { name = "langchain-text-splitters", directory = "libs/text-splitters" },
-    { name = "mypy", specifier = ">=1.15,<1.16" },
+    { name = "mypy", specifier = ">=1.17.1,<1.18" },
     { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
     { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" },
 ]
@@ -2618,7 +2613,7 @@ dependencies = [
 
 [[package]]
 name = "langchain-groq"
-version = "0.3.6"
+version = "0.3.7"
 source = { editable = "libs/partners/groq" }
 dependencies = [
     { name = "groq" },
@@ -2627,7 +2622,7 @@ dependencies = [
 
 [package.metadata]
 requires-dist = [
-    { name = "groq", specifier = ">=0.29.0,<1" },
+    { name = "groq", specifier = ">=0.30.0,<1" },
     { name = "langchain-core", editable = "libs/core" },
 ]
 
@@ -2819,7 +2814,7 @@ typing = []
 
 [[package]]
 name = "langchain-openai"
-version = "0.3.28"
+version = "0.3.29"
 source = { editable = "libs/partners/openai" }
 dependencies = [
     { name = "langchain-core" },
@@ -2889,10 +2884,14 @@ version = "0.3.9"
 source = { editable = "libs/text-splitters" }
 dependencies = [
     { name = "langchain-core" },
+    { name = "pip" },
 ]
 
 [package.metadata]
-requires-dist = [{ name = "langchain-core", editable = "libs/core" }]
+requires-dist = [
+    { name = "langchain-core", editable = "libs/core" },
+    { name = "pip", specifier = ">=25.2" },
+]
 
 [package.metadata.requires-dev]
 dev = [
@@ -2901,7 +2900,7 @@ dev = [
 ]
 lint = [
     { name = "langchain-core", editable = "libs/core" },
-    { name = "ruff", specifier = ">=0.12.2,<0.13" },
+    { name = "ruff", specifier = ">=0.12.8,<0.13" },
 ]
 test = [
     { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
@@ -2918,6 +2917,7 @@ test-integration = [
     { name = "sentence-transformers", specifier = ">=3.0.1" },
     { name = "spacy", specifier = ">=3.8.7,<4.0.0" },
     { name = "thinc", specifier = ">=8.3.6,<9.0.0" },
+    { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
     { name = "transformers", specifier = ">=4.51.3,<5.0.0" },
 ]
 typing = [
@@ -4438,6 +4438,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/41/67/936f9814bdd74b2dfd4822f1f7725ab5d8ff4103919a1664eb4874c58b2f/pillow-11.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0", size = 2626353, upload-time = "2025-01-02T08:13:52.725Z" },
 ]
 
+[[package]]
+name = "pip"
+version = "25.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/16/650289cd3f43d5a2fadfd98c68bd1e1e7f2550a1a5326768cddfbcedb2c5/pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2", size = 1840021, upload-time = "2025-07-30T21:50:15.401Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa/pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717", size = 1752557, upload-time = "2025-07-30T21:50:13.323Z" },
+]
+
 [[package]]
 name = "platformdirs"
 version = "4.3.6"
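Note on the Ollama change (not part of the patch): the new unit tests above pin down the filtering rule added to `_parse_arguments_from_tool_call` — an `arguments` entry named `functionName` is dropped only when its value merely echoes the tool's own name, so no-argument tools parse to `{}` while any other value is preserved. A minimal self-contained sketch of that rule, with a hypothetical helper name rather than the library function:

# Illustrative sketch only: the echo-filtering rule exercised by
# test__parse_arguments_from_tool_call_with_function_name_metadata.
from typing import Any

def filter_tool_arguments(raw_tool_call: dict[str, Any]) -> dict[str, Any]:
    function = raw_tool_call.get("function", {})
    name = function.get("name")
    arguments = function.get("arguments", {})
    parsed: dict[str, Any] = {}
    for key, value in arguments.items():
        # Drop 'functionName' only when it just repeats the tool's own name.
        if key == "functionName" and value == name:
            continue
        parsed[key] = value
    return parsed

# An echo of the function name is treated as metadata and removed.
assert filter_tool_arguments(
    {"function": {"name": "noop_tool", "arguments": {"functionName": "noop_tool"}}}
) == {}
# A differing value may be a real argument, so it is kept.
assert filter_tool_arguments(
    {"function": {"name": "function_a", "arguments": {"functionName": "function_b"}}}
) == {"functionName": "function_b"}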