diff --git a/libs/community/tests/integration_tests/agent/test_ainetwork_agent.py b/libs/community/tests/integration_tests/agent/test_ainetwork_agent.py
index e9fe03c148b..bc93af42509 100644
--- a/libs/community/tests/integration_tests/agent/test_ainetwork_agent.py
+++ b/libs/community/tests/integration_tests/agent/test_ainetwork_agent.py
@@ -8,12 +8,12 @@ from typing import Any
 from urllib.error import HTTPError
 
 import pytest
+from langchain.agents import AgentType, initialize_agent
+
 from langchain_community.agent_toolkits.ainetwork.toolkit import AINetworkToolkit
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.tools.ainetwork.utils import authenticate
 
-from langchain.agents import AgentType, initialize_agent
-
 
 class Match(Enum):
     __test__ = False
diff --git a/libs/community/tests/integration_tests/agent/test_powerbi_agent.py b/libs/community/tests/integration_tests/agent/test_powerbi_agent.py
index 1910d28d2cd..7f16ce8d2e1 100644
--- a/libs/community/tests/integration_tests/agent/test_powerbi_agent.py
+++ b/libs/community/tests/integration_tests/agent/test_powerbi_agent.py
@@ -1,8 +1,9 @@
 import pytest
+from langchain_core.utils import get_from_env
+
 from langchain_community.agent_toolkits import PowerBIToolkit, create_pbi_agent
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.utilities.powerbi import PowerBIDataset
-from langchain_core.utils import get_from_env
 
 
 def azure_installed() -> bool:
diff --git a/libs/community/tests/integration_tests/cache/test_astradb.py b/libs/community/tests/integration_tests/cache/test_astradb.py
index 43096cc9e7f..f8807677978 100644
--- a/libs/community/tests/integration_tests/cache/test_astradb.py
+++ b/libs/community/tests/integration_tests/cache/test_astradb.py
@@ -15,13 +15,13 @@ import os
 from typing import AsyncIterator, Iterator
 
 import pytest
-from langchain_community.cache import AstraDBCache, AstraDBSemanticCache
-from langchain_community.utilities.astradb import SetupMode
+from langchain.globals import get_llm_cache, set_llm_cache
 from langchain_core.caches import BaseCache
 from langchain_core.language_models import LLM
 from langchain_core.outputs import Generation, LLMResult
 
-from langchain.globals import get_llm_cache, set_llm_cache
+from langchain_community.cache import AstraDBCache, AstraDBSemanticCache
+from langchain_community.utilities.astradb import SetupMode
 
 from tests.integration_tests.cache.fake_embeddings import FakeEmbeddings
 from tests.unit_tests.llms.fake_llm import FakeLLM
diff --git a/libs/community/tests/integration_tests/cache/test_azure_cosmosdb_cache.py b/libs/community/tests/integration_tests/cache/test_azure_cosmosdb_cache.py
index b068b3d900c..d6eb6fc43e9 100644
--- a/libs/community/tests/integration_tests/cache/test_azure_cosmosdb_cache.py
+++ b/libs/community/tests/integration_tests/cache/test_azure_cosmosdb_cache.py
@@ -10,14 +10,14 @@ import os
 import uuid
 
 import pytest
+from langchain.globals import get_llm_cache, set_llm_cache
+from langchain_core.outputs import Generation
+
 from langchain_community.cache import AzureCosmosDBSemanticCache
 from langchain_community.vectorstores.azure_cosmos_db import (
     CosmosDBSimilarityType,
     CosmosDBVectorSearchType,
 )
-from langchain_core.outputs import Generation
-
-from langchain.globals import get_llm_cache, set_llm_cache
 from tests.integration_tests.cache.fake_embeddings import (
     FakeEmbeddings,
 )
diff --git a/libs/community/tests/integration_tests/cache/test_cassandra.py b/libs/community/tests/integration_tests/cache/test_cassandra.py
index c18877b444a..44308db1bf5 100644
--- a/libs/community/tests/integration_tests/cache/test_cassandra.py
+++ b/libs/community/tests/integration_tests/cache/test_cassandra.py
@@ -5,11 +5,11 @@ import time
 from typing import Any, Iterator, Tuple
 
 import pytest
-from langchain_community.cache import CassandraCache, CassandraSemanticCache
-from langchain_community.utilities.cassandra import SetupMode
+from langchain.globals import get_llm_cache, set_llm_cache
 from langchain_core.outputs import Generation, LLMResult
 
-from langchain.globals import get_llm_cache, set_llm_cache
+from langchain_community.cache import CassandraCache, CassandraSemanticCache
+from langchain_community.utilities.cassandra import SetupMode
 
 from tests.integration_tests.cache.fake_embeddings import FakeEmbeddings
 from tests.unit_tests.llms.fake_llm import FakeLLM
diff --git a/libs/community/tests/integration_tests/cache/test_gptcache.py b/libs/community/tests/integration_tests/cache/test_gptcache.py
index 8f7aa82e8af..4126dd9e3d9 100644
--- a/libs/community/tests/integration_tests/cache/test_gptcache.py
+++ b/libs/community/tests/integration_tests/cache/test_gptcache.py
@@ -2,10 +2,10 @@ import os
 from typing import Any, Callable, Union
 
 import pytest
-from langchain_community.cache import GPTCache
+from langchain.globals import get_llm_cache, set_llm_cache
 from langchain_core.outputs import Generation
 
-from langchain.globals import get_llm_cache, set_llm_cache
+from langchain_community.cache import GPTCache
 from tests.unit_tests.llms.fake_llm import FakeLLM
 
 try:
diff --git a/libs/community/tests/integration_tests/cache/test_momento_cache.py b/libs/community/tests/integration_tests/cache/test_momento_cache.py
index 8605182ba1a..6e4b41e3ce7 100644
--- a/libs/community/tests/integration_tests/cache/test_momento_cache.py
+++ b/libs/community/tests/integration_tests/cache/test_momento_cache.py
@@ -11,10 +11,10 @@ from datetime import timedelta
 from typing import Iterator
 
 import pytest
-from langchain_community.cache import MomentoCache
+from langchain.globals import set_llm_cache
 from langchain_core.outputs import Generation, LLMResult
 
-from langchain.globals import set_llm_cache
+from langchain_community.cache import MomentoCache
 from tests.unit_tests.llms.fake_llm import FakeLLM
 
 
diff --git a/libs/community/tests/integration_tests/cache/test_opensearch_cache.py b/libs/community/tests/integration_tests/cache/test_opensearch_cache.py
index eda3297e86e..db203bbb39d 100644
--- a/libs/community/tests/integration_tests/cache/test_opensearch_cache.py
+++ b/libs/community/tests/integration_tests/cache/test_opensearch_cache.py
@@ -1,7 +1,7 @@
-from langchain_community.cache import OpenSearchSemanticCache
+from langchain.globals import get_llm_cache, set_llm_cache
 from langchain_core.outputs import Generation
 
-from langchain.globals import get_llm_cache, set_llm_cache
+from langchain_community.cache import OpenSearchSemanticCache
 from tests.integration_tests.cache.fake_embeddings import (
     FakeEmbeddings,
 )
diff --git a/libs/community/tests/integration_tests/cache/test_redis_cache.py b/libs/community/tests/integration_tests/cache/test_redis_cache.py
index 20ed08e6786..eb9f00da813 100644
--- a/libs/community/tests/integration_tests/cache/test_redis_cache.py
+++ b/libs/community/tests/integration_tests/cache/test_redis_cache.py
@@ -5,13 +5,13 @@ from contextlib import asynccontextmanager, contextmanager
 from typing import AsyncGenerator, Generator, List, Optional, cast
 
 import pytest
-from langchain_community.cache import AsyncRedisCache, RedisCache, RedisSemanticCache
+from langchain.globals import get_llm_cache, set_llm_cache
 from langchain_core.embeddings import Embeddings
 from langchain_core.load.dump import dumps
 from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
 from langchain_core.outputs import ChatGeneration, Generation, LLMResult
 
-from langchain.globals import get_llm_cache, set_llm_cache
+from langchain_community.cache import AsyncRedisCache, RedisCache, RedisSemanticCache
 from tests.integration_tests.cache.fake_embeddings import (
     ConsistentFakeEmbeddings,
     FakeEmbeddings,
diff --git a/libs/community/tests/integration_tests/cache/test_upstash_redis_cache.py b/libs/community/tests/integration_tests/cache/test_upstash_redis_cache.py
index 4352d8a0c2e..dfbf8e9d698 100644
--- a/libs/community/tests/integration_tests/cache/test_upstash_redis_cache.py
+++ b/libs/community/tests/integration_tests/cache/test_upstash_redis_cache.py
@@ -1,11 +1,11 @@
 """Test Upstash Redis cache functionality."""
 import uuid
 
+import langchain
 import pytest
-from langchain_community.cache import UpstashRedisCache
 from langchain_core.outputs import Generation, LLMResult
 
-import langchain
+from langchain_community.cache import UpstashRedisCache
 from tests.unit_tests.llms.fake_chat_model import FakeChatModel
 from tests.unit_tests.llms.fake_llm import FakeLLM
diff --git a/libs/community/tests/integration_tests/chains/test_dalle_agent.py b/libs/community/tests/integration_tests/chains/test_dalle_agent.py
index 70e6b469413..f393522ab69 100644
--- a/libs/community/tests/integration_tests/chains/test_dalle_agent.py
+++ b/libs/community/tests/integration_tests/chains/test_dalle_agent.py
@@ -1,9 +1,9 @@
 """Integration test for Dall-E image generator agent."""
+from langchain.agents import AgentType, initialize_agent
+
 from langchain_community.agent_toolkits.load_tools import load_tools
 from langchain_community.llms import OpenAI
 
-from langchain.agents import AgentType, initialize_agent
-
 
 def test_call() -> None:
     """Test that the agent runs and returns output."""
diff --git a/libs/community/tests/integration_tests/chains/test_graph_database.py b/libs/community/tests/integration_tests/chains/test_graph_database.py
index 915df46e1c8..91a46a579d8 100644
--- a/libs/community/tests/integration_tests/chains/test_graph_database.py
+++ b/libs/community/tests/integration_tests/chains/test_graph_database.py
@@ -1,12 +1,12 @@
 """Test Graph Database Chain."""
 import os
 
+from langchain.chains.loading import load_chain
+
 from langchain_community.chains.graph_qa.cypher import GraphCypherQAChain
 from langchain_community.graphs import Neo4jGraph
 from langchain_community.llms.openai import OpenAI
 
-from langchain.chains.loading import load_chain
-
 
 def test_connect_neo4j() -> None:
     """Test that Neo4j database is correctly instantiated and connected."""
diff --git a/libs/community/tests/integration_tests/chains/test_graph_database_sparql.py b/libs/community/tests/integration_tests/chains/test_graph_database_sparql.py
index 345c47dcefb..2b2a3b676fc 100644
--- a/libs/community/tests/integration_tests/chains/test_graph_database_sparql.py
+++ b/libs/community/tests/integration_tests/chains/test_graph_database_sparql.py
@@ -3,11 +3,11 @@ import pathlib
 import re
 from unittest.mock import MagicMock, Mock
 
+from langchain.chains import LLMChain
+
 from langchain_community.chains.graph_qa.sparql import GraphSparqlQAChain
 from langchain_community.graphs import RdfGraph
 
-from langchain.chains import LLMChain
-
 """
 cd libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb
 ./start.sh
diff --git a/libs/community/tests/integration_tests/chains/test_ontotext_graphdb_qa.py b/libs/community/tests/integration_tests/chains/test_ontotext_graphdb_qa.py
index 4cc595228d9..34955b66431 100644
--- a/libs/community/tests/integration_tests/chains/test_ontotext_graphdb_qa.py
+++ b/libs/community/tests/integration_tests/chains/test_ontotext_graphdb_qa.py
@@ -1,11 +1,11 @@
 from unittest.mock import MagicMock, Mock
 
 import pytest
+from langchain.chains import LLMChain
+
 from langchain_community.chains.graph_qa.ontotext_graphdb import OntotextGraphDBQAChain
 from langchain_community.graphs import OntotextGraphDBGraph
 
-from langchain.chains import LLMChain
-
 """
 cd libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb
 ./start.sh
diff --git a/libs/community/tests/integration_tests/chains/test_react.py b/libs/community/tests/integration_tests/chains/test_react.py
index 203d9e6a223..a1fc7ba83e2 100644
--- a/libs/community/tests/integration_tests/chains/test_react.py
+++ b/libs/community/tests/integration_tests/chains/test_react.py
@@ -1,10 +1,10 @@
 """Integration test for self ask with search."""
 
+from langchain.agents.react.base import ReActChain
+
 from langchain_community.docstore import Wikipedia
 from langchain_community.llms.openai import OpenAI
 
-from langchain.agents.react.base import ReActChain
-
 
 def test_react() -> None:
     """Test functionality on a prompt."""
diff --git a/libs/community/tests/integration_tests/chains/test_retrieval_qa.py b/libs/community/tests/integration_tests/chains/test_retrieval_qa.py
index eb7e2811368..16091aa4b7c 100644
--- a/libs/community/tests/integration_tests/chains/test_retrieval_qa.py
+++ b/libs/community/tests/integration_tests/chains/test_retrieval_qa.py
@@ -1,14 +1,14 @@
 """Test RetrievalQA functionality."""
 from pathlib import Path
 
+from langchain.chains import RetrievalQA
+from langchain.chains.loading import load_chain
+from langchain_text_splitters.character import CharacterTextSplitter
+
 from langchain_community.document_loaders import TextLoader
 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.llms import OpenAI
 from langchain_community.vectorstores import FAISS
-from langchain_text_splitters.character import CharacterTextSplitter
-
-from langchain.chains import RetrievalQA
-from langchain.chains.loading import load_chain
 
 
 def test_retrieval_qa_saving_loading(tmp_path: Path) -> None:
diff --git a/libs/community/tests/integration_tests/chains/test_retrieval_qa_with_sources.py b/libs/community/tests/integration_tests/chains/test_retrieval_qa_with_sources.py
index c8752cd7e7a..f3c1661fce9 100644
--- a/libs/community/tests/integration_tests/chains/test_retrieval_qa_with_sources.py
+++ b/libs/community/tests/integration_tests/chains/test_retrieval_qa_with_sources.py
@@ -1,12 +1,12 @@
 """Test RetrievalQA functionality."""
+from langchain.chains import RetrievalQAWithSourcesChain
+from langchain.chains.loading import load_chain
+from langchain_text_splitters.character import CharacterTextSplitter
+
 from langchain_community.document_loaders import DirectoryLoader
 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.llms import OpenAI
 from langchain_community.vectorstores import FAISS
-from langchain_text_splitters.character import CharacterTextSplitter
-
-from langchain.chains import RetrievalQAWithSourcesChain
-from langchain.chains.loading import load_chain
 
 
 def test_retrieval_qa_with_sources_chain_saving_loading(tmp_path: str) -> None:
diff --git a/libs/community/tests/integration_tests/chains/test_self_ask_with_search.py b/libs/community/tests/integration_tests/chains/test_self_ask_with_search.py
index 5cf0f93b8cb..543790e14a8 100644
--- a/libs/community/tests/integration_tests/chains/test_self_ask_with_search.py
+++ b/libs/community/tests/integration_tests/chains/test_self_ask_with_search.py
@@ -1,9 +1,9 @@
 """Integration test for self ask with search."""
+from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain
+
 from langchain_community.llms.openai import OpenAI
 from langchain_community.utilities.searchapi import SearchApiAPIWrapper
 
-from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain
-
 
 def test_self_ask_with_search() -> None:
     """Test functionality on a prompt."""
diff --git a/libs/community/tests/integration_tests/memory/test_astradb.py b/libs/community/tests/integration_tests/memory/test_astradb.py
index 4caf39985ec..0728535d80a 100644
--- a/libs/community/tests/integration_tests/memory/test_astradb.py
+++ b/libs/community/tests/integration_tests/memory/test_astradb.py
@@ -2,13 +2,13 @@ import os
 from typing import AsyncIterable, Iterable
 
 import pytest
+from langchain.memory import ConversationBufferMemory
+from langchain_core.messages import AIMessage, HumanMessage
+
 from langchain_community.chat_message_histories.astradb import (
     AstraDBChatMessageHistory,
 )
 from langchain_community.utilities.astradb import SetupMode
-from langchain_core.messages import AIMessage, HumanMessage
-
-from langchain.memory import ConversationBufferMemory
 
 
 def _has_env_vars() -> bool:
diff --git a/libs/community/tests/integration_tests/memory/test_cassandra.py b/libs/community/tests/integration_tests/memory/test_cassandra.py
index 668cc87a440..6ff03ba6e77 100644
--- a/libs/community/tests/integration_tests/memory/test_cassandra.py
+++ b/libs/community/tests/integration_tests/memory/test_cassandra.py
@@ -2,12 +2,12 @@ import os
 import time
 from typing import Optional
 
+from langchain.memory import ConversationBufferMemory
+from langchain_core.messages import AIMessage, HumanMessage
+
 from langchain_community.chat_message_histories.cassandra import (
     CassandraChatMessageHistory,
 )
-from langchain_core.messages import AIMessage, HumanMessage
-
-from langchain.memory import ConversationBufferMemory
 
 
 def _chat_message_history(
diff --git a/libs/community/tests/integration_tests/memory/test_cosmos_db.py b/libs/community/tests/integration_tests/memory/test_cosmos_db.py
index c2c085144c7..94ca42af1c3 100644
--- a/libs/community/tests/integration_tests/memory/test_cosmos_db.py
+++ b/libs/community/tests/integration_tests/memory/test_cosmos_db.py
@@ -1,10 +1,10 @@
 import json
 import os
 
-from langchain_community.chat_message_histories import CosmosDBChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import CosmosDBChatMessageHistory
 
 # Replace these with your Azure Cosmos DB endpoint and key
 endpoint = os.environ.get("COSMOS_DB_ENDPOINT", "")
diff --git a/libs/community/tests/integration_tests/memory/test_elasticsearch.py b/libs/community/tests/integration_tests/memory/test_elasticsearch.py
index 38a5d0635dc..4e4ab4e1358 100644
--- a/libs/community/tests/integration_tests/memory/test_elasticsearch.py
+++ b/libs/community/tests/integration_tests/memory/test_elasticsearch.py
@@ -4,10 +4,10 @@ import uuid
 from typing import Generator, Union
 
 import pytest
-from langchain_community.chat_message_histories import ElasticsearchChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import ElasticsearchChatMessageHistory
 
 """
 cd tests/integration_tests/memory/docker-compose
diff --git a/libs/community/tests/integration_tests/memory/test_firestore.py b/libs/community/tests/integration_tests/memory/test_firestore.py
index 5d9fabf4aee..ae4be495c58 100644
--- a/libs/community/tests/integration_tests/memory/test_firestore.py
+++ b/libs/community/tests/integration_tests/memory/test_firestore.py
@@ -1,9 +1,9 @@
 import json
 
-from langchain_community.chat_message_histories import FirestoreChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import FirestoreChatMessageHistory
 
 
 def test_memory_with_message_store() -> None:
diff --git a/libs/community/tests/integration_tests/memory/test_momento.py b/libs/community/tests/integration_tests/memory/test_momento.py
index b5d0e7f43c2..5f0d4685548 100644
--- a/libs/community/tests/integration_tests/memory/test_momento.py
+++ b/libs/community/tests/integration_tests/memory/test_momento.py
@@ -10,10 +10,10 @@ from datetime import timedelta
 from typing import Iterator
 
 import pytest
-from langchain_community.chat_message_histories import MomentoChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import MomentoChatMessageHistory
 
 
 def random_string() -> str:
diff --git a/libs/community/tests/integration_tests/memory/test_mongodb.py b/libs/community/tests/integration_tests/memory/test_mongodb.py
index f99242ebee4..384bbb52e72 100644
--- a/libs/community/tests/integration_tests/memory/test_mongodb.py
+++ b/libs/community/tests/integration_tests/memory/test_mongodb.py
@@ -1,10 +1,10 @@
 import json
 import os
 
-from langchain_community.chat_message_histories import MongoDBChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import MongoDBChatMessageHistory
 
 # Replace these with your mongodb connection string
 connection_string = os.environ.get("MONGODB_CONNECTION_STRING", "")
diff --git a/libs/community/tests/integration_tests/memory/test_neo4j.py b/libs/community/tests/integration_tests/memory/test_neo4j.py
index 06a49569449..5f1aa954d17 100644
--- a/libs/community/tests/integration_tests/memory/test_neo4j.py
+++ b/libs/community/tests/integration_tests/memory/test_neo4j.py
@@ -1,9 +1,9 @@
 import json
 
-from langchain_community.chat_message_histories import Neo4jChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import Neo4jChatMessageHistory
 
 
 def test_memory_with_message_store() -> None:
diff --git a/libs/community/tests/integration_tests/memory/test_redis.py b/libs/community/tests/integration_tests/memory/test_redis.py
index cb00d04fa05..22863d1b8f0 100644
--- a/libs/community/tests/integration_tests/memory/test_redis.py
+++ b/libs/community/tests/integration_tests/memory/test_redis.py
@@ -1,9 +1,9 @@
 import json
 
-from langchain_community.chat_message_histories import RedisChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import RedisChatMessageHistory
 
 
 def test_memory_with_message_store() -> None:
diff --git a/libs/community/tests/integration_tests/memory/test_rockset.py b/libs/community/tests/integration_tests/memory/test_rockset.py
index ce220aea723..21540d462d4 100644
--- a/libs/community/tests/integration_tests/memory/test_rockset.py
+++ b/libs/community/tests/integration_tests/memory/test_rockset.py
@@ -8,10 +8,10 @@ and ROCKSET_REGION environment variables set.
 import json
 import os
 
-from langchain_community.chat_message_histories import RocksetChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import RocksetChatMessageHistory
 
 collection_name = "langchain_demo"
 session_id = "MySession"
diff --git a/libs/community/tests/integration_tests/memory/test_singlestoredb.py b/libs/community/tests/integration_tests/memory/test_singlestoredb.py
index aae1d5c4178..fbc895455be 100644
--- a/libs/community/tests/integration_tests/memory/test_singlestoredb.py
+++ b/libs/community/tests/integration_tests/memory/test_singlestoredb.py
@@ -1,9 +1,9 @@
 import json
 
-from langchain_community.chat_message_histories import SingleStoreDBChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import SingleStoreDBChatMessageHistory
 
 # Replace these with your mongodb connection string
 TEST_SINGLESTOREDB_URL = "root:pass@localhost:3306/db"
diff --git a/libs/community/tests/integration_tests/memory/test_upstash_redis.py b/libs/community/tests/integration_tests/memory/test_upstash_redis.py
index f03aa5fca1e..4e6b24362ff 100644
--- a/libs/community/tests/integration_tests/memory/test_upstash_redis.py
+++ b/libs/community/tests/integration_tests/memory/test_upstash_redis.py
@@ -1,12 +1,12 @@
 import json
 
 import pytest
+from langchain.memory import ConversationBufferMemory
+from langchain_core.messages import message_to_dict
+
 from langchain_community.chat_message_histories.upstash_redis import (
     UpstashRedisChatMessageHistory,
 )
-from langchain_core.messages import message_to_dict
-
-from langchain.memory import ConversationBufferMemory
 
 URL = ""
 TOKEN = ""
diff --git a/libs/community/tests/integration_tests/memory/test_xata.py b/libs/community/tests/integration_tests/memory/test_xata.py
index bfcf90e98a4..c53c2963fde 100644
--- a/libs/community/tests/integration_tests/memory/test_xata.py
+++ b/libs/community/tests/integration_tests/memory/test_xata.py
@@ -6,10 +6,10 @@ Before running this test, please create a Xata database.
 import json
 import os
 
-from langchain_community.chat_message_histories import XataChatMessageHistory
+from langchain.memory import ConversationBufferMemory
 from langchain_core.messages import message_to_dict
 
-from langchain.memory import ConversationBufferMemory
+from langchain_community.chat_message_histories import XataChatMessageHistory
 
 
 class TestXata:
diff --git a/libs/community/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py b/libs/community/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py
index ba64456b17d..c8032987972 100644
--- a/libs/community/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py
+++ b/libs/community/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py
@@ -1,11 +1,12 @@
 """Test functionality related to ngram overlap based selector."""
 import pytest
 
+from langchain_core.prompts import PromptTemplate
+
 from langchain_community.example_selectors import (
     NGramOverlapExampleSelector,
     ngram_overlap_score,
 )
-from langchain_core.prompts import PromptTemplate
 
 EXAMPLES = [
     {"input": "See Spot run.", "output": "foo1"},
diff --git a/libs/community/tests/integration_tests/retrievers/document_compressors/test_base.py b/libs/community/tests/integration_tests/retrievers/document_compressors/test_base.py
index 9414592511a..500e32a392d 100644
--- a/libs/community/tests/integration_tests/retrievers/document_compressors/test_base.py
+++ b/libs/community/tests/integration_tests/retrievers/document_compressors/test_base.py
@@ -1,13 +1,13 @@
 """Integration test for compression pipelines."""
-from langchain_community.document_transformers import EmbeddingsRedundantFilter
-from langchain_community.embeddings import OpenAIEmbeddings
-from langchain_core.documents import Document
-from langchain_text_splitters.character import CharacterTextSplitter
-
 from langchain.retrievers.document_compressors import (
     DocumentCompressorPipeline,
     EmbeddingsFilter,
 )
+from langchain_core.documents import Document
+from langchain_text_splitters.character import CharacterTextSplitter
+
+from langchain_community.document_transformers import EmbeddingsRedundantFilter
+from langchain_community.embeddings import OpenAIEmbeddings
 
 
 def test_document_compressor_pipeline() -> None:
diff --git a/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py b/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py
index eb584162003..a99b1159901 100644
--- a/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py
+++ b/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py
@@ -1,8 +1,8 @@
 """Integration test for LLMChainExtractor."""
-from langchain_community.chat_models import ChatOpenAI
+from langchain.retrievers.document_compressors import LLMChainExtractor
 from langchain_core.documents import Document
 
-from langchain.retrievers.document_compressors import LLMChainExtractor
+from langchain_community.chat_models import ChatOpenAI
 
 
 def test_llm_construction_with_kwargs() -> None:
diff --git a/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py b/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py
index 55249a879a1..c6e1d5b41cc 100644
--- a/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py
+++ b/libs/community/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py
@@ -1,8 +1,8 @@
 """Integration test for llm-based relevant doc filtering."""
-from langchain_community.chat_models import ChatOpenAI
+from langchain.retrievers.document_compressors import LLMChainFilter
 from langchain_core.documents import Document
 
-from langchain.retrievers.document_compressors import LLMChainFilter
+from langchain_community.chat_models import ChatOpenAI
 
 
 def test_llm_chain_filter() -> None:
diff --git a/libs/community/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py b/libs/community/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py
index b433d57bf2b..d90f09ff312 100644
--- a/libs/community/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py
+++ b/libs/community/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py
@@ -1,12 +1,12 @@
 """Integration test for embedding-based relevant doc filtering."""
 import numpy as np
+from langchain.retrievers.document_compressors import EmbeddingsFilter
+from langchain_core.documents import Document
+
 from langchain_community.document_transformers.embeddings_redundant_filter import (
     _DocumentWithState,
 )
 from langchain_community.embeddings import OpenAIEmbeddings
-from langchain_core.documents import Document
-
-from langchain.retrievers.document_compressors import EmbeddingsFilter
 
 
 def test_embeddings_filter() -> None:
diff --git a/libs/community/tests/integration_tests/retrievers/test_contextual_compression.py b/libs/community/tests/integration_tests/retrievers/test_contextual_compression.py
index 020cb1133cb..203cd222d47 100644
--- a/libs/community/tests/integration_tests/retrievers/test_contextual_compression.py
+++ b/libs/community/tests/integration_tests/retrievers/test_contextual_compression.py
@@ -1,9 +1,9 @@
-from langchain_community.embeddings import OpenAIEmbeddings
-from langchain_community.vectorstores import FAISS
-
 from langchain.retrievers.contextual_compression import ContextualCompressionRetriever
 from langchain.retrievers.document_compressors import EmbeddingsFilter
 
+from langchain_community.embeddings import OpenAIEmbeddings
+from langchain_community.vectorstores import FAISS
+
 
 def test_contextual_compression_retriever_get_relevant_docs() -> None:
     """Test get_relevant_docs."""
diff --git a/libs/community/tests/integration_tests/retrievers/test_merger_retriever.py b/libs/community/tests/integration_tests/retrievers/test_merger_retriever.py
index ec0eeb4cf3f..b5b575e7c3c 100644
--- a/libs/community/tests/integration_tests/retrievers/test_merger_retriever.py
+++ b/libs/community/tests/integration_tests/retrievers/test_merger_retriever.py
@@ -1,8 +1,8 @@
+from langchain.retrievers.merger_retriever import MergerRetriever
+
 from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores import Chroma
 
-from langchain.retrievers.merger_retriever import MergerRetriever
-
 
 def test_merger_retriever_get_relevant_docs() -> None:
     """Test get_relevant_docs."""
diff --git a/libs/community/tests/integration_tests/smith/evaluation/test_runner_utils.py b/libs/community/tests/integration_tests/smith/evaluation/test_runner_utils.py
index 285e63b9d9f..37ff9de58a3 100644
--- a/libs/community/tests/integration_tests/smith/evaluation/test_runner_utils.py
+++ b/libs/community/tests/integration_tests/smith/evaluation/test_runner_utils.py
@@ -2,19 +2,19 @@ from typing import Iterator, List, Optional
 from uuid import uuid4
 
 import pytest
-from langchain_community.chat_models import ChatOpenAI
-from langchain_community.llms.openai import OpenAI
+from langchain.chains.llm import LLMChain
+from langchain.evaluation import EvaluatorType
+from langchain.smith import RunEvalConfig, run_on_dataset
+from langchain.smith.evaluation import InputFormatError
+from langchain.smith.evaluation.runner_utils import arun_on_dataset
 from langchain_core.messages import BaseMessage, HumanMessage
 from langchain_core.prompts.chat import ChatPromptTemplate
 from langsmith import Client as Client
 from langsmith.evaluation import run_evaluator
 from langsmith.schemas import DataType, Example, Run
 
-from langchain.chains.llm import LLMChain
-from langchain.evaluation import EvaluatorType
-from langchain.smith import RunEvalConfig, run_on_dataset
-from langchain.smith.evaluation import InputFormatError
-from langchain.smith.evaluation.runner_utils import arun_on_dataset
+from langchain_community.chat_models import ChatOpenAI
+from langchain_community.llms.openai import OpenAI
 
 
 def _check_all_feedback_passed(_project_name: str, client: Client) -> None:
diff --git a/libs/community/tests/integration_tests/test_document_transformers.py b/libs/community/tests/integration_tests/test_document_transformers.py
index c7293148f15..c8a3d1a9485 100644
--- a/libs/community/tests/integration_tests/test_document_transformers.py
+++ b/libs/community/tests/integration_tests/test_document_transformers.py
@@ -1,12 +1,13 @@
 """Integration test for embedding-based redundant doc filtering."""
 
+from langchain_core.documents import Document
+
 from langchain_community.document_transformers.embeddings_redundant_filter import (
     EmbeddingsClusteringFilter,
     EmbeddingsRedundantFilter,
     _DocumentWithState,
 )
 from langchain_community.embeddings import OpenAIEmbeddings
-from langchain_core.documents import Document
 
 
 def test_embeddings_redundant_filter() -> None:
diff --git a/libs/community/tests/integration_tests/test_nuclia_transformer.py b/libs/community/tests/integration_tests/test_nuclia_transformer.py
index 5dc1c70ed67..0d3bc3ee42e 100644
--- a/libs/community/tests/integration_tests/test_nuclia_transformer.py
+++ b/libs/community/tests/integration_tests/test_nuclia_transformer.py
@@ -3,11 +3,12 @@ import json
 from typing import Any
 from unittest import mock
 
+from langchain_core.documents import Document
+
 from langchain_community.document_transformers.nuclia_text_transform import (
     NucliaTextTransformer,
 )
 from langchain_community.tools.nuclia.tool import NucliaUnderstandingAPI
-from langchain_core.documents import Document
 
 
 def fakerun(**args: Any) -> Any:
diff --git a/libs/community/tests/unit_tests/test_cache.py b/libs/community/tests/unit_tests/test_cache.py
index 7b1a8965634..b275a61ca8e 100644
--- a/libs/community/tests/unit_tests/test_cache.py
+++ b/libs/community/tests/unit_tests/test_cache.py
@@ -4,6 +4,7 @@ from typing import Dict, Generator, List, Union
 
 import pytest
 from _pytest.fixtures import FixtureRequest
+from langchain.globals import get_llm_cache, set_llm_cache
 from langchain_core.caches import InMemoryCache
 from langchain_core.language_models import FakeListChatModel, FakeListLLM
 from langchain_core.language_models.chat_models import BaseChatModel
@@ -14,8 +15,6 @@ from langchain_core.outputs import ChatGeneration, Generation
 from sqlalchemy import create_engine
 from sqlalchemy.orm import Session
 
-from langchain.globals import get_llm_cache, set_llm_cache
-
 pytest.importorskip("langchain_community")
 from langchain_community.cache import SQLAlchemyCache  # noqa: E402
 
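Every hunk in this patch applies the same re-grouping: imports from langchain, langchain_core, langchain_text_splitters, and langsmith move into the third-party block next to pytest, while langchain_community imports (and the local tests helpers) drop into the trailing group. A minimal sketch of the resulting layout in a hypothetical test module follows; the combination of imports below is illustrative only (each import appears somewhere in the hunks above, but this exact module does not):

# Hypothetical module showing the import grouping this patch enforces.
import json  # standard library first

import pytest  # third-party: pytest plus the core LangChain packages
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict

# langchain_community (the package under test) and local test helpers come last.
from langchain_community.chat_message_histories import RedisChatMessageHistory
from tests.unit_tests.llms.fake_llm import FakeLLM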