chore(langchain): add mypy warn_unreachable setting (#32529)

See
https://mypy.readthedocs.io/en/stable/config_file.html#confval-warn_unreachable
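
For context, here is a minimal sketch (hypothetical code, not taken from this diff) of what `warn_unreachable` flags, and why several defensive branches below gain a `# type: ignore[unreachable]` suppression:

    from typing import Union

    def coerce(value: Union[str, int]) -> str:
        if isinstance(value, str):
            return value
        if isinstance(value, int):
            return str(value)
        # The Union is fully narrowed above, so mypy considers this line
        # unreachable and, with warn_unreachable = true, reports
        # "Statement is unreachable  [unreachable]". The runtime guard is
        # kept deliberately and silenced with a targeted ignore:
        raise TypeError(f"unexpected type: {type(value)}")  # type: ignore[unreachable]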

---------

Co-authored-by: Mason Daugherty <github@mdrxy.com>
Author: Christophe Bornet
Date: 2025-08-16 01:03:53 +02:00
Committed by: GitHub
Parent: d3d23e2372
Commit: 791d309c06
23 changed files with 55 additions and 46 deletions


@@ -1375,7 +1375,7 @@ class AgentExecutor(Chain):
             elif callable(self.handle_parsing_errors):
                 observation = self.handle_parsing_errors(e)
             else:
-                msg = "Got unexpected type of `handle_parsing_errors`"
+                msg = "Got unexpected type of `handle_parsing_errors`"  # type: ignore[unreachable]
                 raise ValueError(msg) from e  # noqa: TRY004
             output = AgentAction("_Exception", observation, text)
             if run_manager:
@@ -1514,7 +1514,7 @@ class AgentExecutor(Chain):
             elif callable(self.handle_parsing_errors):
                 observation = self.handle_parsing_errors(e)
             else:
-                msg = "Got unexpected type of `handle_parsing_errors`"
+                msg = "Got unexpected type of `handle_parsing_errors`"  # type: ignore[unreachable]
                 raise ValueError(msg) from e  # noqa: TRY004
             output = AgentAction("_Exception", observation, text)
             tool_run_kwargs = self._action_agent.tool_run_logging_kwargs()


@@ -1,5 +1,6 @@
 import json
 from collections.abc import Sequence
+from typing import Any
 
 from langchain_core.agents import AgentAction, AgentActionMessageLog
 from langchain_core.messages import AIMessage, BaseMessage, FunctionMessage
@@ -30,7 +31,7 @@ def _convert_agent_action_to_messages(
 def _create_function_message(
     agent_action: AgentAction,
-    observation: str,
+    observation: Any,
 ) -> FunctionMessage:
     """Convert agent action and observation into a function message.
 
     Args:


@@ -1,5 +1,6 @@
 import json
 from collections.abc import Sequence
+from typing import Any
 
 from langchain_core.agents import AgentAction
 from langchain_core.messages import (
@@ -13,7 +14,7 @@ from langchain.agents.output_parsers.tools import ToolAgentAction
 def _create_tool_message(
     agent_action: ToolAgentAction,
-    observation: str,
+    observation: Any,
 ) -> ToolMessage:
     """Convert agent action and observation into a tool message.


@@ -170,7 +170,7 @@ class ZeroShotAgent(Agent):
             raise ValueError(msg)
         for tool in tools:
             if tool.description is None:
-                msg = (
+                msg = (  # type: ignore[unreachable]
                     f"Got a tool {tool.name} without a description. For this agent, "
                     f"a description must always be provided."
                 )


@@ -45,9 +45,9 @@ class JSONAgentOutputParser(AgentOutputParser):
     def parse(self, text: str) -> Union[AgentAction, AgentFinish]:
         try:
             response = parse_json_markdown(text)
-            if isinstance(response, list):
+            if isinstance(response, list):  # type: ignore[unreachable]
                 # gpt turbo frequently ignores the directive to emit a single action
-                logger.warning("Got multiple action responses: %s", response)
+                logger.warning("Got multiple action responses: %s", response)  # type: ignore[unreachable]
                 response = response[0]
             if response["action"] == "Final Answer":
                 return AgentFinish({"output": response["action_input"]}, text)


@@ -286,7 +286,7 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC):
     def output_keys(self) -> list[str]:
         """Keys expected to be in the chain output."""
 
-    def _validate_inputs(self, inputs: dict[str, Any]) -> None:
+    def _validate_inputs(self, inputs: Any) -> None:
         """Check that all inputs are present."""
         if not isinstance(inputs, dict):
             _input_keys = set(self.input_keys)


@@ -54,7 +54,7 @@ def _get_chat_history(chat_history: list[CHAT_TURN_TYPE]) -> str:
             ai = "Assistant: " + dialogue_turn[1]
             buffer += f"\n{human}\n{ai}"
         else:
-            msg = (
+            msg = (  # type: ignore[unreachable]
                 f"Unsupported chat history format: {type(dialogue_turn)}."
                 f" Full chat history: {chat_history} "
             )


@@ -9,6 +9,8 @@ from typing_extensions import TypedDict
 try:
     check_package_version("lark", gte_version="1.1.5")
     from lark import Lark, Transformer, v_args
+
+    _HAS_LARK = True
 except ImportError:
 
     def v_args(*_: Any, **__: Any) -> Any:  # type: ignore[misc]
@@ -17,6 +19,7 @@ except ImportError:
     Transformer = object  # type: ignore[assignment,misc]
     Lark = object  # type: ignore[assignment,misc]
+    _HAS_LARK = False
 
 from langchain_core.structured_query import (
     Comparator,
@@ -260,8 +263,7 @@ def get_parser(
     Returns:
         Lark parser for the query language.
     """
-    # QueryTransformer is None when Lark cannot be imported.
-    if QueryTransformer is None:
+    if not _HAS_LARK:
         msg = "Cannot import lark, please install it with 'pip install lark'."
         raise ImportError(msg)
     transformer = QueryTransformer(


@@ -439,7 +439,7 @@ def create_structured_output_runnable(
             output_parser=output_parser,
             **kwargs,
         )
-    msg = (
+    msg = (  # type: ignore[unreachable]
         f"Invalid mode {mode}. Expected one of 'openai-tools', 'openai-functions', "
         f"'openai-json'."
     )


@@ -336,7 +336,7 @@ class CacheBackedEmbeddings(Embeddings):
             )
             raise ValueError(msg)
         else:
-            msg = (
+            msg = (  # type: ignore[unreachable]
                 "key_encoder must be either 'blake2b', 'sha1', 'sha256', 'sha512' "
                 "or a callable that encodes keys."
             )


@@ -9,7 +9,7 @@ sessions.
 import warnings
 from datetime import datetime
-from typing import Any
+from typing import Any, Optional
 
 from langchain_core.messages import BaseMessage
 from langchain_core.prompts.chat import SystemMessagePromptTemplate
@@ -109,7 +109,7 @@ class ConversationVectorStoreTokenBufferMemory(ConversationTokenBufferMemory):
     previous_history_template: str = DEFAULT_HISTORY_TEMPLATE
     split_chunk_size: int = 1000
-    _memory_retriever: VectorStoreRetrieverMemory = PrivateAttr(default=None)  # type: ignore[assignment]
+    _memory_retriever: Optional[VectorStoreRetrieverMemory] = PrivateAttr(default=None)
     _timestamps: list[datetime] = PrivateAttr(default_factory=list)
 
     @property


@@ -34,7 +34,7 @@ class ModelLaboratory:
         """
         for chain in chains:
             if not isinstance(chain, Chain):
-                msg = (
+                msg = (  # type: ignore[unreachable]
                     "ModelLaboratory should now be initialized with Chains. "
                     "If you want to initialize with LLMs, use the `from_llms` method "
                     "instead (`ModelLaboratory.from_llms(...)`)"


@@ -47,7 +47,7 @@ class DocumentCompressorPipeline(BaseDocumentCompressor):
             elif isinstance(_transformer, BaseDocumentTransformer):
                 documents = _transformer.transform_documents(documents)
             else:
-                msg = f"Got unexpected transformer type: {_transformer}"
+                msg = f"Got unexpected transformer type: {_transformer}"  # type: ignore[unreachable]
                 raise ValueError(msg)  # noqa: TRY004
         return documents
@@ -77,6 +77,6 @@ class DocumentCompressorPipeline(BaseDocumentCompressor):
             elif isinstance(_transformer, BaseDocumentTransformer):
                 documents = await _transformer.atransform_documents(documents)
             else:
-                msg = f"Got unexpected transformer type: {_transformer}"
+                msg = f"Got unexpected transformer type: {_transformer}"  # type: ignore[unreachable]
                 raise ValueError(msg)  # noqa: TRY004
         return documents


@@ -236,7 +236,7 @@ class EnsembleRetriever(BaseRetriever):
         # Enforce that retrieved docs are Documents for each list in retriever_docs
         for i in range(len(retriever_docs)):
             retriever_docs[i] = [
-                Document(page_content=cast("str", doc)) if isinstance(doc, str) else doc
+                Document(page_content=cast("str", doc)) if isinstance(doc, str) else doc  # type: ignore[unreachable]
                 for doc in retriever_docs[i]
             ]


@@ -238,7 +238,7 @@ def _wrap_in_chain_factory(
             return lambda: RunnableLambda(constructor)
         # Typical correct case
         return constructor
-    return llm_or_chain_factory
+    return llm_or_chain_factory  # type: ignore[unreachable]
 
 
 def _get_prompt(inputs: dict[str, Any]) -> str:
@@ -679,7 +679,7 @@ def _load_run_evaluators(
         elif callable(custom_evaluator):
            run_evaluators.append(run_evaluator_dec(custom_evaluator))
         else:
-            msg = (
+            msg = (  # type: ignore[unreachable]
                f"Unsupported custom evaluator: {custom_evaluator}."
                f" Expected RunEvaluator or StringEvaluator."
            )


@@ -4,7 +4,7 @@ from __future__ import annotations
 import uuid
 from abc import abstractmethod
-from typing import Any, Optional
+from typing import Any, Optional, Union, cast
 
 from langchain_core.callbacks.manager import (
     AsyncCallbackManagerForChainRun,
@@ -55,16 +55,20 @@ class StringRunMapper(Serializable):
 class LLMStringRunMapper(StringRunMapper):
     """Extract items to evaluate from the run object."""
 
-    def serialize_chat_messages(self, messages: list[dict]) -> str:
+    def serialize_chat_messages(
+        self, messages: Union[list[dict], list[list[dict]]]
+    ) -> str:
         """Extract the input messages from the run."""
         if isinstance(messages, list) and messages:
             if isinstance(messages[0], dict):
-                chat_messages = _get_messages_from_run_dict(messages)
+                chat_messages = _get_messages_from_run_dict(
+                    cast("list[dict]", messages)
+                )
             elif isinstance(messages[0], list):
                 # Runs from Tracer have messages as a list of lists of dicts
                 chat_messages = _get_messages_from_run_dict(messages[0])
             else:
-                msg = f"Could not extract messages to evaluate {messages}"
+                msg = f"Could not extract messages to evaluate {messages}"  # type: ignore[unreachable]
                 raise ValueError(msg)
             return get_buffer_string(chat_messages)
         msg = f"Could not extract messages to evaluate {messages}"
@@ -107,11 +111,11 @@ class LLMStringRunMapper(StringRunMapper):
         if not outputs.get("generations"):
             msg = "Cannot evaluate LLM Run without generations."
             raise ValueError(msg)
-        generations: list[dict] = outputs["generations"]
+        generations: Union[list[dict], list[list[dict]]] = outputs["generations"]
         if not generations:
             msg = "Cannot evaluate LLM run with empty generations."
             raise ValueError(msg)
-        first_generation: dict = generations[0]
+        first_generation: Union[dict, list[dict]] = generations[0]
         if isinstance(first_generation, list):
             # Runs from Tracer have generations as a list of lists of dicts
             # Whereas Runs from the API have a list of dicts
@@ -450,7 +454,7 @@ class StringRunEvaluatorChain(Chain, RunEvaluator):
         ):
             example_mapper = StringExampleMapper(reference_key=reference_key)
         elif evaluator.requires_reference:
-            msg = (
+            msg = (  # type: ignore[unreachable]
                 f"Evaluator {evaluator.evaluation_name} requires a reference"
                 " example from the dataset. Please specify the reference key from"
                 " amongst the dataset outputs keys."


@@ -1,6 +1,6 @@
 """Create a key-value store for any langchain serializable object."""
 
-from typing import Callable, Optional
+from typing import Any, Callable, Optional
 
 from langchain_core.documents import Document
 from langchain_core.load import Serializable, dumps, loads
@@ -14,7 +14,7 @@ def _dump_as_bytes(obj: Serializable) -> bytes:
     return dumps(obj).encode("utf-8")
 
 
-def _dump_document_as_bytes(obj: Document) -> bytes:
+def _dump_document_as_bytes(obj: Any) -> bytes:
     """Return a bytes representation of a document."""
     if not isinstance(obj, Document):
         msg = "Expected a Document instance"


@@ -129,6 +129,7 @@ strict_bytes = "True"
 ignore_missing_imports = "True"
 enable_error_code = "deprecated"
 report_deprecated_as_note = "True"
+warn_unreachable = "True"
 
 # TODO: activate for 'strict' checking
 disallow_untyped_calls = "False"


@@ -514,20 +514,20 @@ async def test_runnable_agent() -> None:
     ]
 
     # stream log
-    results: list[RunLogPatch] = [  # type: ignore[no-redef]
+    log_results: list[RunLogPatch] = [
         r async for r in executor.astream_log({"question": "hello"})
     ]
 
     # # Let's stream just the llm tokens.
     messages = []
-    for log_record in results:
-        for op in log_record.ops:  # type: ignore[attr-defined]
+    for log_record in log_results:
+        for op in log_record.ops:
             if op["op"] == "add" and isinstance(op["value"], AIMessageChunk):
                 messages.append(op["value"])  # noqa: PERF401
     assert messages != []
 
     # Aggregate state
-    run_log = reduce(operator.add, results)
+    run_log = reduce(operator.add, log_results)
 
     assert isinstance(run_log, RunLog)


@@ -23,7 +23,7 @@ def get_action_and_input(text: str) -> tuple[str, str]:
         return output.tool, str(output.tool_input)
     if isinstance(output, AgentFinish):
         return output.return_values["output"], output.log
-    msg = "Unexpected output type"
+    msg = "Unexpected output type"  # type: ignore[unreachable]
     raise ValueError(msg)


@@ -1,6 +1,6 @@
 """Test LLM-generated structured query parsing."""
 
-from typing import Any, cast
+from typing import Any, Optional, cast
 
 import lark
 import pytest
@@ -149,7 +149,7 @@ def test_parse_date_value(x: str) -> None:
         ),
     ],
 )
-def test_parse_datetime_value(x: str, expected: dict) -> None:
+def test_parse_datetime_value(x: str, expected: Optional[dict[str, str]]) -> None:
     """Test parsing of datetime values with ISO 8601 format."""
     try:
         parsed = cast("Comparison", DEFAULT_PARSER.parse(f'eq("publishedAt", {x})'))


@@ -107,7 +107,7 @@ class GenericFakeChatModel(BaseChatModel):
             **kwargs,
         )
         if not isinstance(chat_result, ChatResult):
-            msg = (
+            msg = (  # type: ignore[unreachable]
                 f"Expected generate to return a ChatResult, "
                 f"but got {type(chat_result)} instead."
             )


@@ -2346,7 +2346,7 @@ wheels = [
 [[package]]
 name = "langchain-openai"
-version = "0.3.29"
+version = "0.3.30"
 source = { editable = "../partners/openai" }
 dependencies = [
     { name = "langchain-core" },
@@ -2357,14 +2357,14 @@ dependencies = [
 [package.metadata]
 requires-dist = [
     { name = "langchain-core", editable = "../core" },
-    { name = "openai", specifier = ">=1.86.0,<2.0.0" },
+    { name = "openai", specifier = ">=1.99.9,<2.0.0" },
     { name = "tiktoken", specifier = ">=0.7,<1" },
 ]
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
 dev = [{ name = "langchain-core", editable = "../core" }]
-lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }]
+lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }]
 test = [
     { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
     { name = "langchain-core", editable = "../core" },
@@ -2390,7 +2390,7 @@ test-integration = [
 ]
 typing = [
     { name = "langchain-core", editable = "../core" },
-    { name = "mypy", specifier = ">=1.10,<2.0" },
+    { name = "mypy", specifier = ">=1.17.1,<2.0" },
     { name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" },
 ]
@@ -2497,7 +2497,7 @@ test-integration = [
 ]
 typing = [
     { name = "lxml-stubs", specifier = ">=0.5.1,<1.0.0" },
-    { name = "mypy", specifier = ">=1.15,<2.0" },
+    { name = "mypy", specifier = ">=1.17.1,<1.18" },
     { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
     { name = "types-requests", specifier = ">=2.31.0.20240218,<3.0.0.0" },
 ]
@@ -3066,7 +3066,7 @@ wheels = [
 [[package]]
 name = "openai"
-version = "1.87.0"
+version = "1.99.9"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
@@ -3078,9 +3078,9 @@ dependencies = [
     { name = "tqdm" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/47/ed/2b3f6c7e950784e9442115ab8ebeff514d543fb33da10607b39364645a75/openai-1.87.0.tar.gz", hash = "sha256:5c69764171e0db9ef993e7a4d8a01fd8ff1026b66f8bdd005b9461782b6e7dfc", size = 470880, upload-time = "2025-06-16T19:04:26.316Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/8a/d2/ef89c6f3f36b13b06e271d3cc984ddd2f62508a0972c1cbcc8485a6644ff/openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92", size = 506992, upload-time = "2025-08-12T02:31:10.054Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/36/ac/313ded47ce1d5bc2ec02ed5dd5506bf5718678a4655ac20f337231d9aae3/openai-1.87.0-py3-none-any.whl", hash = "sha256:f9bcae02ac4fff6522276eee85d33047335cfb692b863bd8261353ce4ada5692", size = 734368, upload-time = "2025-06-16T19:04:23.181Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/fb/df274ca10698ee77b07bff952f302ea627cc12dac6b85289485dd77db6de/openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a", size = 786816, upload-time = "2025-08-12T02:31:08.34Z" },
 ]
 
 [[package]]
[[package]] [[package]]