Mirror of https://github.com/hwchase17/langchain.git, synced 2025-06-23 15:19:33 +00:00

community: Use Blockbuster to detect blocking calls in asyncio during tests (#29609)

Same as https://github.com/langchain-ai/langchain/pull/29043, but for langchain-community.

**Dependencies:**
- blockbuster (test)

**Twitter handle:** cbornet_

Co-authored-by: Erick Friis <erick@langchain.dev>

parent 3a57a28daa
commit 30f6c9f5c8
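For context, Blockbuster patches common blocking primitives (via forbiddenfruit) and raises an error when one of them is called while an asyncio event loop is running in the current thread. The following is a minimal, hypothetical sketch of that behavior, not taken from this commit, and assuming `blockbuster_ctx()` may be used without a scanned-module argument:

```python
import asyncio
import time

from blockbuster import BlockingError, blockbuster_ctx


async def call_blocking_api() -> str:
    # time.sleep() blocks the running event loop; with Blockbuster active,
    # the patched call raises instead of silently stalling every coroutine.
    time.sleep(0.1)
    return "done"


def main() -> None:
    # Activate Blockbuster for the duration of the run (assumed no-arg form).
    with blockbuster_ctx():
        try:
            asyncio.run(call_blocking_api())
        except BlockingError as err:
            print(f"blocking call detected: {err}")


if __name__ == "__main__":
    main()
```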
@@ -1,10 +1,12 @@
 from __future__ import annotations

+import functools
 from importlib.metadata import version

 from packaging.version import parse


+@functools.cache
 def is_openai_v1() -> bool:
     """Return whether OpenAI API is v1 or more."""
     _version = parse(version("openai"))
@@ -4,7 +4,7 @@ build-backend = "pdm.backend"

 [project]
 authors = []
-license = {text = "MIT"}
+license = { text = "MIT" }
 requires-python = "<4.0,>=3.9"
 dependencies = [
     "langchain-core<1.0.0,>=0.3.34",
@@ -48,6 +48,7 @@ test = [
    "syrupy<5.0.0,>=4.0.2",
    "requests-mock<2.0.0,>=1.11.0",
    "pytest-xdist<4.0.0,>=3.6.1",
+   "blockbuster<1.6,>=1.5.13",
    "cffi<1.17.1; python_version < \"3.10\"",
    "cffi; python_version >= \"3.10\"",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
@@ -55,13 +56,8 @@ test = [
    "langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
    "toml>=0.10.2",
 ]
-codespell = [
-    "codespell<3.0.0,>=2.2.0",
-]
-test_integration = [
-    "pytest-vcr<2.0.0,>=1.0.2",
-    "vcrpy<7,>=6",
-]
+codespell = ["codespell<3.0.0,>=2.2.0"]
+test_integration = ["pytest-vcr<2.0.0,>=1.0.2", "vcrpy<7,>=6"]
 lint = [
    "ruff<0.6,>=0.5",
    "cffi<1.17.1; python_version < \"3.10\"",
@@ -1,12 +1,28 @@
 """Configuration for unit tests."""

+from collections.abc import Iterator
 from importlib import util
 from typing import Dict, Sequence

 import pytest
+from blockbuster import blockbuster_ctx
 from pytest import Config, Function, Parser


+@pytest.fixture(autouse=True)
+def blockbuster() -> Iterator[None]:
+    with blockbuster_ctx("langchain_community") as bb:
+        (
+            bb.functions["os.stat"]
+            .can_block_in("langchain_community/utils/openai.py", "is_openai_v1")
+            .can_block_in("httpx/_client.py", "_init_transport")
+        )
+        bb.functions["os.path.abspath"].can_block_in(
+            "sqlalchemy/dialects/sqlite/pysqlite.py", "create_connect_args"
+        )
+        yield
+
+
 def pytest_addoption(parser: Parser) -> None:
     """Add custom command line options to pytest."""
     parser.addoption(
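Hypothetical usage note (not part of this commit): with the autouse fixture above active, an async unit test that does blocking work on the event loop thread fails with `blockbuster.BlockingError` unless the call is allow-listed via `can_block_in`. The usual fix is to offload the blocking work, for example with `asyncio.to_thread`. A minimal sketch, assuming pytest is configured to collect async tests (e.g. `asyncio_mode = "auto"`):

```python
import asyncio
import time


def _expensive_sync_operation() -> str:
    # Simulates blocking work (file I/O, a C extension, etc.) that must not
    # run directly on the event loop thread.
    time.sleep(0.01)
    return "result"


async def test_offloads_blocking_work() -> None:
    # asyncio.to_thread() runs the blocking function in a worker thread, so
    # the Blockbuster fixture sees no blocking call on the event loop thread.
    result = await asyncio.to_thread(_expensive_sync_operation)
    assert result == "result"
```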
@@ -62,20 +62,31 @@ def set_cache_and_teardown(request: FixtureRequest) -> Generator[None, None, Non
         raise ValueError("Cache not set. This should never happen.")


-async def test_llm_caching() -> None:
+def test_llm_caching() -> None:
     prompt = "How are you?"
     response = "Test response"
     cached_response = "Cached test response"
     llm = FakeListLLM(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=prompt,
             llm_string=create_llm_string(llm),
             return_val=[Generation(text=cached_response)],
         )
         assert llm.invoke(prompt) == cached_response
-        # async test
+    else:
+        raise ValueError(
+            "The cache not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+
+
+async def test_llm_caching_async() -> None:
+    prompt = "How are you?"
+    response = "Test response"
+    cached_response = "Cached test response"
+    llm = FakeListLLM(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=prompt,
             llm_string=create_llm_string(llm),
@@ -110,14 +121,13 @@ def test_old_sqlite_llm_caching() -> None:
     assert llm.invoke(prompt) == cached_response


-async def test_chat_model_caching() -> None:
+def test_chat_model_caching() -> None:
     prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
     response = "Test response"
     cached_response = "Cached test response"
     cached_message = AIMessage(content=cached_response)
     llm = FakeListChatModel(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(),
@@ -126,8 +136,20 @@ async def test_chat_model_caching() -> None:
         result = llm.invoke(prompt)
         assert isinstance(result, AIMessage)
         assert result.content == cached_response
+    else:
+        raise ValueError(
+            "The cache not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+

-        # async test
+async def test_chat_model_caching_async() -> None:
+    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
+    response = "Test response"
+    cached_response = "Cached test response"
+    cached_message = AIMessage(content=cached_response)
+    llm = FakeListChatModel(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(),
@@ -143,14 +165,13 @@ async def test_chat_model_caching() -> None:
         )


-async def test_chat_model_caching_params() -> None:
+def test_chat_model_caching_params() -> None:
     prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
     response = "Test response"
     cached_response = "Cached test response"
     cached_message = AIMessage(content=cached_response)
     llm = FakeListChatModel(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(functions=[]),
@@ -162,8 +183,20 @@ async def test_chat_model_caching_params() -> None:
         assert result.content == cached_response
         assert isinstance(result_no_params, AIMessage)
         assert result_no_params.content == response
+    else:
+        raise ValueError(
+            "The cache not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+

-        # async test
+async def test_chat_model_caching_params_async() -> None:
+    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
+    response = "Test response"
+    cached_response = "Cached test response"
+    cached_message = AIMessage(content=cached_response)
+    llm = FakeListChatModel(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(functions=[]),
@@ -182,13 +215,12 @@ async def test_chat_model_caching_params() -> None:
         )


-async def test_llm_cache_clear() -> None:
+def test_llm_cache_clear() -> None:
     prompt = "How are you?"
     expected_response = "Test response"
     cached_response = "Cached test response"
     llm = FakeListLLM(responses=[expected_response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=prompt,
             llm_string=create_llm_string(llm),
@@ -197,8 +229,19 @@
         llm_cache.clear()
         response = llm.invoke(prompt)
         assert response == expected_response
+    else:
+        raise ValueError(
+            "The cache not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+

-        # async test
+async def test_llm_cache_clear_async() -> None:
+    prompt = "How are you?"
+    expected_response = "Test response"
+    cached_response = "Cached test response"
+    llm = FakeListLLM(responses=[expected_response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=prompt,
             llm_string=create_llm_string(llm),
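Hypothetical illustration (not from this commit) of the split applied in the cache tests above: each former combined test becomes one sync test that uses the blocking API and one async test that uses the awaitable API, so no blocking call ever runs inside a coroutine. The sketch below assumes langchain-core's `InMemoryCache` and a pytest setup that runs async tests:

```python
from langchain_core.caches import InMemoryCache
from langchain_core.outputs import Generation


def test_cache_roundtrip_sync() -> None:
    # Blocking cache API, exercised from a plain sync test.
    cache = InMemoryCache()
    cache.update("prompt", "llm-string", [Generation(text="cached")])
    assert cache.lookup("prompt", "llm-string") == [Generation(text="cached")]


async def test_cache_roundtrip_async() -> None:
    # Awaitable cache API, exercised from an async test under Blockbuster.
    cache = InMemoryCache()
    await cache.aupdate("prompt", "llm-string", [Generation(text="cached")])
    assert await cache.alookup("prompt", "llm-string") == [Generation(text="cached")]
```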
@@ -76,6 +76,7 @@ def test_test_group_dependencies(uv_conf: Mapping[str, Any]) -> None:
         "pytest-socket",
         "pytest-watcher",
         "pytest-xdist",
+        "blockbuster",
         "responses",
         "syrupy",
         "toml",
@@ -1589,10 +1589,10 @@ def test_faiss_local_save_load() -> None:


 @pytest.mark.requires("faiss")
-async def test_faiss_async_local_save_load() -> None:
+def test_faiss_async_local_save_load() -> None:
     """Test end to end serialization."""
     texts = ["foo", "bar", "baz"]
-    docsearch = await FAISS.afrom_texts(texts, FakeEmbeddings())
+    docsearch = FAISS.from_texts(texts, FakeEmbeddings())
     temp_timestamp = datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S")
     with tempfile.TemporaryDirectory(suffix="_" + temp_timestamp + "/") as temp_folder:
         docsearch.save_local(temp_folder)
@@ -72,17 +72,17 @@ async def test_inmemory_mmr() -> None:
     assert output[1] == _AnyDocument(page_content="foy")


-async def test_inmemory_dump_load(tmp_path: Path) -> None:
+def test_inmemory_dump_load(tmp_path: Path) -> None:
     """Test end to end construction and search."""
     embedding = ConsistentFakeEmbeddings()
-    store = await InMemoryVectorStore.afrom_texts(["foo", "bar", "baz"], embedding)
-    output = await store.asimilarity_search("foo", k=1)
+    store = InMemoryVectorStore.from_texts(["foo", "bar", "baz"], embedding)
+    output = store.similarity_search("foo", k=1)

     test_file = str(tmp_path / "test.json")
     store.dump(test_file)

     loaded_store = InMemoryVectorStore.load(test_file, embedding)
-    loaded_output = await loaded_store.asimilarity_search("foo", k=1)
+    loaded_output = loaded_store.similarity_search("foo", k=1)

     assert output == loaded_output
@@ -284,6 +284,18 @@ css = [
     { name = "tinycss2" },
 ]

+[[package]]
+name = "blockbuster"
+version = "1.5.14"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "forbiddenfruit" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e0/77/a46b97dc6807c88c864a134793d7c7b915dea45c7e44da6c3adebac90501/blockbuster-1.5.14.tar.gz", hash = "sha256:d77ed3b931b058b4e746f65e32ea21e8ed21a4ef0ca88b7bb046bdb057e1adb0", size = 50191 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/81/c2/1515ea61aa08f3b44882aa59a0c03be667a6fec2a4026aad76944b40b030/blockbuster-1.5.14-py3-none-any.whl", hash = "sha256:5b5e46ac4b5f5d2a7a599944d83bee0c9eb46509868acb6d8fbc7c8058769aaf", size = 12372 },
+]
+
 [[package]]
 name = "certifi"
 version = "2025.1.31"
@@ -819,6 +831,12 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924 },
 ]

+[[package]]
+name = "forbiddenfruit"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e6/79/d4f20e91327c98096d605646bdc6a5ffedae820f38d378d3515c42ec5e60/forbiddenfruit-0.1.4.tar.gz", hash = "sha256:e3f7e66561a29ae129aac139a85d610dbf3dd896128187ed5454b6421f624253", size = 43756 }
+
 [[package]]
 name = "fqdn"
 version = "1.5.1"
@@ -1617,6 +1635,7 @@ lint = [
     { name = "ruff" },
 ]
 test = [
+    { name = "blockbuster" },
     { name = "cffi", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
     { name = "cffi", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
     { name = "duckdb-engine" },
@@ -1687,6 +1706,7 @@ lint = [
     { name = "ruff", specifier = ">=0.5,<0.6" },
 ]
 test = [
+    { name = "blockbuster", specifier = ">=1.5.13,<1.6" },
     { name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
     { name = "cffi", marker = "python_full_version >= '3.10'" },
     { name = "duckdb-engine", specifier = ">=0.13.6,<1.0.0" },