diff --git a/libs/community/langchain_community/utils/openai.py b/libs/community/langchain_community/utils/openai.py
index 8846d5fbc4c..1f90aa89fa1 100644
--- a/libs/community/langchain_community/utils/openai.py
+++ b/libs/community/langchain_community/utils/openai.py
@@ -1,10 +1,12 @@
 from __future__ import annotations
 
+import functools
 from importlib.metadata import version
 
 from packaging.version import parse
 
 
+@functools.cache
 def is_openai_v1() -> bool:
     """Return whether OpenAI API is v1 or more."""
     _version = parse(version("openai"))
diff --git a/libs/community/pyproject.toml b/libs/community/pyproject.toml
index 76bce09eb1e..a0098f4cecf 100644
--- a/libs/community/pyproject.toml
+++ b/libs/community/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "pdm.backend"
 
 [project]
 authors = []
-license = {text = "MIT"}
+license = { text = "MIT" }
 requires-python = "<4.0,>=3.9"
 dependencies = [
     "langchain-core<1.0.0,>=0.3.34",
@@ -48,6 +48,7 @@ test = [
     "syrupy<5.0.0,>=4.0.2",
     "requests-mock<2.0.0,>=1.11.0",
     "pytest-xdist<4.0.0,>=3.6.1",
+    "blockbuster<1.6,>=1.5.13",
     "cffi<1.17.1; python_version < \"3.10\"",
     "cffi; python_version >= \"3.10\"",
     "langchain-core @ file:///${PROJECT_ROOT}/../core",
@@ -55,13 +56,8 @@ test = [
     "langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
     "toml>=0.10.2",
 ]
-codespell = [
-    "codespell<3.0.0,>=2.2.0",
-]
-test_integration = [
-    "pytest-vcr<2.0.0,>=1.0.2",
-    "vcrpy<7,>=6",
-]
+codespell = ["codespell<3.0.0,>=2.2.0"]
+test_integration = ["pytest-vcr<2.0.0,>=1.0.2", "vcrpy<7,>=6"]
 lint = [
     "ruff<0.6,>=0.5",
     "cffi<1.17.1; python_version < \"3.10\"",
diff --git a/libs/community/tests/unit_tests/conftest.py b/libs/community/tests/unit_tests/conftest.py
index dd4080cfca1..63f3a83e0bb 100644
--- a/libs/community/tests/unit_tests/conftest.py
+++ b/libs/community/tests/unit_tests/conftest.py
@@ -1,12 +1,28 @@
 """Configuration for unit tests."""
 
+from collections.abc import Iterator
 from importlib import util
 from typing import Dict, Sequence
 
 import pytest
+from blockbuster import blockbuster_ctx
 from pytest import Config, Function, Parser
 
 
+@pytest.fixture(autouse=True)
+def blockbuster() -> Iterator[None]:
+    with blockbuster_ctx("langchain_community") as bb:
+        (
+            bb.functions["os.stat"]
+            .can_block_in("langchain_community/utils/openai.py", "is_openai_v1")
+            .can_block_in("httpx/_client.py", "_init_transport")
+        )
+        bb.functions["os.path.abspath"].can_block_in(
+            "sqlalchemy/dialects/sqlite/pysqlite.py", "create_connect_args"
+        )
+        yield
+
+
 def pytest_addoption(parser: Parser) -> None:
     """Add custom command line options to pytest."""
     parser.addoption(
diff --git a/libs/community/tests/unit_tests/test_cache.py b/libs/community/tests/unit_tests/test_cache.py
index 66ab8a74d7a..24887d619ba 100644
--- a/libs/community/tests/unit_tests/test_cache.py
+++ b/libs/community/tests/unit_tests/test_cache.py
@@ -62,20 +62,31 @@ def set_cache_and_teardown(request: FixtureRequest) -> Generator[None, None, Non
         raise ValueError("Cache not set. This should never happen.")
 
 
-async def test_llm_caching() -> None:
+def test_llm_caching() -> None:
     prompt = "How are you?"
     response = "Test response"
     cached_response = "Cached test response"
     llm = FakeListLLM(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=prompt,
             llm_string=create_llm_string(llm),
             return_val=[Generation(text=cached_response)],
         )
         assert llm.invoke(prompt) == cached_response
-        # async test
+    else:
+        raise ValueError(
+            "The cache is not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+
+
+async def test_llm_caching_async() -> None:
+    prompt = "How are you?"
+    response = "Test response"
+    cached_response = "Cached test response"
+    llm = FakeListLLM(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=prompt,
             llm_string=create_llm_string(llm),
@@ -110,14 +121,13 @@ def test_old_sqlite_llm_caching() -> None:
     assert llm.invoke(prompt) == cached_response
 
 
-async def test_chat_model_caching() -> None:
+def test_chat_model_caching() -> None:
     prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
     response = "Test response"
     cached_response = "Cached test response"
     cached_message = AIMessage(content=cached_response)
     llm = FakeListChatModel(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(),
@@ -126,8 +136,20 @@ async def test_chat_model_caching() -> None:
         result = llm.invoke(prompt)
         assert isinstance(result, AIMessage)
         assert result.content == cached_response
+    else:
+        raise ValueError(
+            "The cache is not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
 
-        # async test
+
+async def test_chat_model_caching_async() -> None:
+    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
+    response = "Test response"
+    cached_response = "Cached test response"
+    cached_message = AIMessage(content=cached_response)
+    llm = FakeListChatModel(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(),
@@ -143,14 +165,13 @@ async def test_chat_model_caching() -> None:
         )
 
 
-async def test_chat_model_caching_params() -> None:
+def test_chat_model_caching_params() -> None:
     prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
     response = "Test response"
     cached_response = "Cached test response"
     cached_message = AIMessage(content=cached_response)
     llm = FakeListChatModel(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(functions=[]),
@@ -162,8 +183,20 @@ async def test_chat_model_caching_params() -> None:
         assert result.content == cached_response
         assert isinstance(result_no_params, AIMessage)
         assert result_no_params.content == response
+    else:
+        raise ValueError(
+            "The cache is not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
 
-        # async test
+
+async def test_chat_model_caching_params_async() -> None:
+    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
+    response = "Test response"
+    cached_response = "Cached test response"
+    cached_message = AIMessage(content=cached_response)
+    llm = FakeListChatModel(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(functions=[]),
@@ -182,13 +215,12 @@ async def test_chat_model_caching_params() -> None:
         )
 
 
-async def test_llm_cache_clear() -> None:
+def test_llm_cache_clear() -> None:
     prompt = "How are you?"
expected_response = "Test response" cached_response = "Cached test response" llm = FakeListLLM(responses=[expected_response]) if llm_cache := get_llm_cache(): - # sync test llm_cache.update( prompt=prompt, llm_string=create_llm_string(llm), @@ -197,8 +229,19 @@ async def test_llm_cache_clear() -> None: llm_cache.clear() response = llm.invoke(prompt) assert response == expected_response + else: + raise ValueError( + "The cache not set. This should never happen, as the pytest fixture " + "`set_cache_and_teardown` always sets the cache." + ) - # async test + +async def test_llm_cache_clear_async() -> None: + prompt = "How are you?" + expected_response = "Test response" + cached_response = "Cached test response" + llm = FakeListLLM(responses=[expected_response]) + if llm_cache := get_llm_cache(): await llm_cache.aupdate( prompt=prompt, llm_string=create_llm_string(llm), diff --git a/libs/community/tests/unit_tests/test_dependencies.py b/libs/community/tests/unit_tests/test_dependencies.py index 19938150632..34c0ec79565 100644 --- a/libs/community/tests/unit_tests/test_dependencies.py +++ b/libs/community/tests/unit_tests/test_dependencies.py @@ -76,6 +76,7 @@ def test_test_group_dependencies(uv_conf: Mapping[str, Any]) -> None: "pytest-socket", "pytest-watcher", "pytest-xdist", + "blockbuster", "responses", "syrupy", "toml", diff --git a/libs/community/tests/unit_tests/vectorstores/test_faiss.py b/libs/community/tests/unit_tests/vectorstores/test_faiss.py index 739bd243f0e..8a7fb9c74e7 100644 --- a/libs/community/tests/unit_tests/vectorstores/test_faiss.py +++ b/libs/community/tests/unit_tests/vectorstores/test_faiss.py @@ -1589,10 +1589,10 @@ def test_faiss_local_save_load() -> None: @pytest.mark.requires("faiss") -async def test_faiss_async_local_save_load() -> None: +def test_faiss_async_local_save_load() -> None: """Test end to end serialization.""" texts = ["foo", "bar", "baz"] - docsearch = await FAISS.afrom_texts(texts, FakeEmbeddings()) + docsearch = FAISS.from_texts(texts, FakeEmbeddings()) temp_timestamp = datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S") with tempfile.TemporaryDirectory(suffix="_" + temp_timestamp + "/") as temp_folder: docsearch.save_local(temp_folder) diff --git a/libs/community/tests/unit_tests/vectorstores/test_inmemory.py b/libs/community/tests/unit_tests/vectorstores/test_inmemory.py index a2bebcd4269..cf31210c893 100644 --- a/libs/community/tests/unit_tests/vectorstores/test_inmemory.py +++ b/libs/community/tests/unit_tests/vectorstores/test_inmemory.py @@ -72,17 +72,17 @@ async def test_inmemory_mmr() -> None: assert output[1] == _AnyDocument(page_content="foy") -async def test_inmemory_dump_load(tmp_path: Path) -> None: +def test_inmemory_dump_load(tmp_path: Path) -> None: """Test end to end construction and search.""" embedding = ConsistentFakeEmbeddings() - store = await InMemoryVectorStore.afrom_texts(["foo", "bar", "baz"], embedding) - output = await store.asimilarity_search("foo", k=1) + store = InMemoryVectorStore.from_texts(["foo", "bar", "baz"], embedding) + output = store.similarity_search("foo", k=1) test_file = str(tmp_path / "test.json") store.dump(test_file) loaded_store = InMemoryVectorStore.load(test_file, embedding) - loaded_output = await loaded_store.asimilarity_search("foo", k=1) + loaded_output = loaded_store.similarity_search("foo", k=1) assert output == loaded_output diff --git a/libs/community/uv.lock b/libs/community/uv.lock index dadf5005f29..fb52e01d707 100644 --- a/libs/community/uv.lock +++ b/libs/community/uv.lock @@ -284,6 
     { name = "tinycss2" },
 ]
 
+[[package]]
+name = "blockbuster"
+version = "1.5.14"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "forbiddenfruit" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e0/77/a46b97dc6807c88c864a134793d7c7b915dea45c7e44da6c3adebac90501/blockbuster-1.5.14.tar.gz", hash = "sha256:d77ed3b931b058b4e746f65e32ea21e8ed21a4ef0ca88b7bb046bdb057e1adb0", size = 50191 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/81/c2/1515ea61aa08f3b44882aa59a0c03be667a6fec2a4026aad76944b40b030/blockbuster-1.5.14-py3-none-any.whl", hash = "sha256:5b5e46ac4b5f5d2a7a599944d83bee0c9eb46509868acb6d8fbc7c8058769aaf", size = 12372 },
+]
+
 [[package]]
 name = "certifi"
 version = "2025.1.31"
@@ -819,6 +831,12 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924 },
 ]
 
+[[package]]
+name = "forbiddenfruit"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e6/79/d4f20e91327c98096d605646bdc6a5ffedae820f38d378d3515c42ec5e60/forbiddenfruit-0.1.4.tar.gz", hash = "sha256:e3f7e66561a29ae129aac139a85d610dbf3dd896128187ed5454b6421f624253", size = 43756 }
+
 [[package]]
 name = "fqdn"
 version = "1.5.1"
@@ -1617,6 +1635,7 @@ lint = [
     { name = "ruff" },
 ]
 test = [
+    { name = "blockbuster" },
     { name = "cffi", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
     { name = "cffi", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
     { name = "duckdb-engine" },
@@ -1687,6 +1706,7 @@ lint = [
     { name = "ruff", specifier = ">=0.5,<0.6" },
 ]
 test = [
+    { name = "blockbuster", specifier = ">=1.5.13,<1.6" },
     { name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
     { name = "cffi", marker = "python_full_version >= '3.10'" },
     { name = "duckdb-engine", specifier = ">=0.13.6,<1.0.0" },
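
For context on what the new autouse fixture in `conftest.py` enforces: blockbuster (via forbiddenfruit) patches known blocking functions such as `time.sleep` and `os.stat`, and raises `BlockingError` whenever one of them is called while an asyncio event loop is running. The sketch below is not part of this diff; it is a minimal illustration of that behavior, assuming only blockbuster's public `blockbuster_ctx` and `BlockingError` API:

```python
import asyncio
import time

from blockbuster import BlockingError, blockbuster_ctx


async def main() -> None:
    # Activate blockbuster for the duration of this block, the way the
    # autouse conftest fixture does for every unit test.
    with blockbuster_ctx():
        try:
            time.sleep(0.1)  # blocking call made on the event loop
        except BlockingError as err:
            print(f"blocking call detected: {err}")
        await asyncio.sleep(0.1)  # the non-blocking equivalent passes


asyncio.run(main())
```

The `can_block_in(...)` calls in the conftest hunk whitelist specific call sites where blocking is considered acceptable, e.g. the `os.stat` triggered by `is_openai_v1`, which the `@functools.cache` decorator added in this diff limits to a single invocation per process.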