community[patch]: import flattening fix (#20110)

This PR should make it easier for linters to do type checking and for IDEs to jump to definition of code.

See #20050 as a template for this PR.
- As a byproduct: added 3 missing `test_imports`.
- Added the missing `SolarChat` to `__init__.py` and added it to the `test_import`
unit test.
- Added `# type: ignore` comments to fix linting. It is not clear why linting
errors appear after the above changes.

---------

Co-authored-by: Eugene Yurtsev <eyurtsev@gmail.com>
This commit is contained in:
Leonid Ganeline
2024-04-10 10:01:19 -07:00
committed by GitHub
parent 12190ad728
commit 4cb5f4c353
60 changed files with 2973 additions and 163 deletions

View File

@@ -108,7 +108,7 @@ def test_bedrock_streaming(chat: BedrockChat) -> None:
full = None
for token in chat.stream("I'm Pickle Rick"):
full = token if full is None else full + token
full = token if full is None else full + token # type: ignore[operator]
assert isinstance(token.content, str)
assert isinstance(cast(AIMessageChunk, full).content, str)

View File

@@ -66,7 +66,7 @@ def assert_docs(docs: List[Document], all_meta: bool = False) -> None:
def test_run_success(api_client: OutlineAPIWrapper) -> None:
responses.add(
responses.POST,
api_client.outline_instance_url + api_client.outline_search_endpoint,
api_client.outline_instance_url + api_client.outline_search_endpoint, # type: ignore[operator]
json=OUTLINE_SUCCESS_RESPONSE,
status=200,
)
@@ -80,7 +80,7 @@ def test_run_success_all_meta(api_client: OutlineAPIWrapper) -> None:
api_client.load_all_available_meta = True
responses.add(
responses.POST,
api_client.outline_instance_url + api_client.outline_search_endpoint,
api_client.outline_instance_url + api_client.outline_search_endpoint, # type: ignore[operator]
json=OUTLINE_SUCCESS_RESPONSE,
status=200,
)
@@ -93,7 +93,7 @@ def test_run_success_all_meta(api_client: OutlineAPIWrapper) -> None:
def test_run_no_result(api_client: OutlineAPIWrapper) -> None:
responses.add(
responses.POST,
api_client.outline_instance_url + api_client.outline_search_endpoint,
api_client.outline_instance_url + api_client.outline_search_endpoint, # type: ignore[operator]
json=OUTLINE_EMPTY_RESPONSE,
status=200,
)
@@ -106,7 +106,7 @@ def test_run_no_result(api_client: OutlineAPIWrapper) -> None:
def test_run_error(api_client: OutlineAPIWrapper) -> None:
responses.add(
responses.POST,
api_client.outline_instance_url + api_client.outline_search_endpoint,
api_client.outline_instance_url + api_client.outline_search_endpoint, # type: ignore[operator]
json=OUTLINE_ERROR_RESPONSE,
status=401,
)

View File

@@ -8,7 +8,7 @@ from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings
@pytest.fixture
def deeplake_datastore() -> DeepLake:
def deeplake_datastore() -> DeepLake: # type: ignore[misc]
texts = ["foo", "bar", "baz"]
metadatas = [{"page": str(i)} for i in range(len(texts))]
docsearch = DeepLake.from_texts(

View File

@@ -85,7 +85,7 @@ def test_lantern_embeddings_distance_strategy() -> None:
collection_name="test_collection",
embedding=FakeEmbeddingsWithAdaDimension(),
connection_string=CONNECTION_STRING,
distance_strategy="hamming",
distance_strategy="hamming", # type: ignore[arg-type]
pre_delete_collection=True,
)
output = docsearch.similarity_search("foo", k=1)

View File

@@ -26,7 +26,7 @@ def _milvus_from_texts(
def _get_pks(expr: str, docsearch: Milvus) -> List[Any]:
return docsearch.get_pks(expr)
return docsearch.get_pks(expr) # type: ignore[return-value]
def test_milvus() -> None:
@@ -51,7 +51,7 @@ def test_milvus_with_id() -> None:
assert output == [Document(page_content="foo")]
output = docsearch.delete(ids=ids)
assert output.delete_count == len(fake_texts)
assert output.delete_count == len(fake_texts) # type: ignore[attr-defined]
try:
ids = ["dup_id" for _ in fake_texts]
@@ -146,7 +146,7 @@ def test_milvus_upsert_entities() -> None:
Document(page_content="test_2", metadata={"id": 3}),
]
ids = docsearch.upsert(pks, documents)
assert len(ids) == 2
assert len(ids) == 2 # type: ignore[arg-type]
# if __name__ == "__main__":

View File

@@ -320,7 +320,7 @@ def test_relevance_score() -> None:
except ValueError:
pass
docsearch_l2.drop_vectorstore()
docsearch_l2.drop_vectorstore() # type: ignore[attr-defined]
def test_retriever_search_threshold() -> None:

View File

@@ -37,7 +37,7 @@ def vdms_client() -> vdms.vdms:
@pytest.mark.requires("vdms")
def test_init_from_client(vdms_client: vdms.vdms) -> None:
embedding_function = FakeEmbeddings()
_ = VDMS(
_ = VDMS( # type: ignore[call-arg]
embedding_function=embedding_function,
client=vdms_client,
)
@@ -331,7 +331,7 @@ def test_with_relevance_score(vdms_client: vdms.vdms) -> None:
def test_add_documents_no_metadata(vdms_client: vdms.vdms) -> None:
collection_name = "test_add_documents_no_metadata"
embedding_function = FakeEmbeddings()
db = VDMS(
db = VDMS( # type: ignore[call-arg]
collection_name=collection_name,
embedding_function=embedding_function,
client=vdms_client,
@@ -343,7 +343,7 @@ def test_add_documents_no_metadata(vdms_client: vdms.vdms) -> None:
def test_add_documents_mixed_metadata(vdms_client: vdms.vdms) -> None:
collection_name = "test_add_documents_mixed_metadata"
embedding_function = FakeEmbeddings()
db = VDMS(
db = VDMS( # type: ignore[call-arg]
collection_name=collection_name,
embedding_function=embedding_function,
client=vdms_client,