mirror of https://github.com/hwchase17/langchain.git (synced 2025-08-23 03:22:38 +00:00)

chore(langchain): select ALL rules with exclusions (#31930)

Co-authored-by: Mason Daugherty <mason@langchain.dev>

parent 7f259863e1
commit 1563099f3f
@@ -1551,7 +1551,7 @@ class AgentExecutor(Chain):
     ],
 )

-# TODO This could yield each result as it becomes available
+# TODO: This could yield each result as it becomes available
 for chunk in result:
     yield chunk

@@ -14,7 +14,7 @@ from langchain.agents.agent import BaseMultiActionAgent, BaseSingleActionAgent
 from langchain.agents.types import AGENT_TO_CLASS
 from langchain.chains.loading import load_chain, load_chain_from_config

-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)

 URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/agents/"

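Side note on the logger fix above: getLogger(__file__) keys the logger on a filesystem path, which varies per install and sits outside the langchain.* logger hierarchy, while getLogger(__name__) uses the dotted module path. A minimal sketch (the module path in the comment is illustrative, not taken from the diff):

    import logging

    logger = logging.getLogger(__name__)  # e.g. "langchain.agents.loading" rather than "/.../loading.py"

    # Package-level configuration now reaches this module's logger:
    logging.getLogger("langchain").setLevel(logging.DEBUG)
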
@@ -72,7 +72,7 @@ def _parse_ai_message(message: BaseMessage) -> Union[list[AgentAction], AgentFin
 del _tool_input["action_name"]
 function_name = tool_schema["action_name"]

-# HACK HACK HACK:
+# A hack here:
 # The code that encodes tool input into Open AI uses a special variable
 # name called `__arg1` to handle old style tools that do not expose a
 # schema and expect a single string argument as an input.

@@ -55,7 +55,7 @@ class OpenAIFunctionsAgentOutputParser(AgentOutputParser):
 )
 raise OutputParserException(msg) from e

-# HACK HACK HACK:
+# A hack here:
 # The code that encodes tool input into Open AI uses a special variable
 # name called `__arg1` to handle old style tools that do not expose a
 # schema and expect a single string argument as an input.

@@ -56,7 +56,7 @@ def parse_ai_message_to_tool_action(
 )
 raise OutputParserException(msg) from e
 for tool_call in tool_calls:
-    # HACK HACK HACK:
+    # A hack here:
     # The code that encodes tool input into Open AI uses a special variable
     # name called `__arg1` to handle old style tools that do not expose a
     # schema and expect a single string argument as an input.

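The same three-line comment appears in each parser touched above; for readers unfamiliar with the convention it describes, a hedged illustration (the input value and the unwrapping code are illustrative, not the parsers' exact source):

    # Old-style tools expose no argument schema, so the encoder wraps their single
    # string input under the reserved key "__arg1"; the parser unwraps it again.
    _tool_input = {"__arg1": "San Francisco weather"}
    if isinstance(_tool_input, dict) and list(_tool_input) == ["__arg1"]:
        tool_input = _tool_input["__arg1"]  # plain string for the old-style tool
    else:
        tool_input = _tool_input  # schema-aware tools keep the dict
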
@@ -8,7 +8,7 @@ from langchain_core.callbacks import AsyncCallbackHandler
 from langchain_core.outputs import LLMResult
 from typing_extensions import override

-# TODO If used by two LLM runs in parallel this won't work as expected
+# TODO: If used by two LLM runs in parallel this won't work as expected


 class AsyncIteratorCallbackHandler(AsyncCallbackHandler):

@@ -51,7 +51,7 @@ class AsyncIteratorCallbackHandler(AsyncCallbackHandler):
 async def on_llm_error(self, error: BaseException, **kwargs: Any) -> None:
     self.done.set()

-# TODO implement the other methods
+# TODO: implement the other methods

 async def aiter(self) -> AsyncIterator[str]:
     """Asynchronous iterator that yields tokens."""

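A minimal sketch (not the library's implementation) of the queue-plus-event pattern this handler relies on; it also shows why the TODO above warns about two parallel LLM runs sharing one handler: whichever run finishes first sets the shared done event, and the iterator stops for both.

    import asyncio
    from typing import AsyncIterator

    async def drain(queue: "asyncio.Queue[str]", done: asyncio.Event) -> AsyncIterator[str]:
        # Yield queued tokens until the producer signals completion and the queue is empty.
        while not (done.is_set() and queue.empty()):
            try:
                yield await asyncio.wait_for(queue.get(), timeout=0.1)
            except asyncio.TimeoutError:
                continue  # re-check the done flag
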
@@ -1,3 +1,4 @@
+import logging
 import time
 from sys import platform
 from typing import (

@@ -11,6 +12,8 @@ from typing import (
 if TYPE_CHECKING:
     from playwright.sync_api import Browser, CDPSession, Page

+logger = logging.getLogger(__name__)
+
 black_listed_elements: set[str] = {
     "html",
     "head",

@@ -298,7 +301,7 @@ class Crawler:

 try:
     cursor = layout_node_index.index(index)
-    # TODO replace this with proper cursoring, ignoring the fact this is
+    # TODO: replace this with proper cursoring, ignoring the fact this is
     # O(n^2) for the moment
 except ValueError:
     continue

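A hedged sketch of the "proper cursoring" the TODO asks for (names and data below are hypothetical): building a position lookup once avoids calling list.index inside the surrounding loop, which is what makes the current approach O(n^2).

    layout_node_index = [5, 9, 12, 40]  # hypothetical node ids
    position = {node: i for i, node in enumerate(layout_node_index)}  # built once, O(n)

    cursor = position.get(12)  # O(1) per lookup
    if cursor is None:
        ...  # same effect as the `except ValueError: continue` branch
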
@@ -535,12 +535,6 @@ def _check_pkg(pkg: str, *, pkg_kebab: Optional[str] = None) -> None:
     raise ImportError(msg)


-def _remove_prefix(s: str, prefix: str) -> str:
-    if s.startswith(prefix):
-        s = s[len(prefix) :]
-    return s
-
-
 _DECLARATIVE_METHODS = ("bind_tools", "with_structured_output")


@@ -608,7 +602,7 @@ class _ConfigurableModel(Runnable[LanguageModelInput, Any]):
 def _model_params(self, config: Optional[RunnableConfig]) -> dict:
     config = ensure_config(config)
     model_params = {
-        _remove_prefix(k, self._config_prefix): v
+        k.removeprefix(self._config_prefix): v
         for k, v in config.get("configurable", {}).items()
         if k.startswith(self._config_prefix)
     }

@@ -630,7 +624,7 @@ class _ConfigurableModel(Runnable[LanguageModelInput, Any]):
 remaining_config["configurable"] = {
     k: v
     for k, v in config.get("configurable", {}).items()
-    if _remove_prefix(k, self._config_prefix) not in model_params
+    if k.removeprefix(self._config_prefix) not in model_params
 }
 queued_declarative_operations = list(self._queued_declarative_operations)
 if remaining_config:

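Both call sites now use the built-in str.removeprefix (Python 3.9+), which does exactly what the deleted _remove_prefix helper did; a quick check (the prefix value is illustrative):

    assert "model_provider".removeprefix("model_") == "provider"
    assert "temperature".removeprefix("model_") == "temperature"  # no prefix: unchanged
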
@@ -143,70 +143,42 @@ ignore-regex = ".*(Stati Uniti|Tense=Pres).*"
 ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin"

 [tool.ruff.lint]
-select = [
-    "A", # flake8-builtins
-    "ARG", # flake8-unused-arguments
-    "ASYNC", # flake8-async
-    "B", # flake8-bugbear
-    "C4", # flake8-comprehensions
-    "COM", # flake8-commas
-    "D1", # pydocstyle: missing docstring
-    "DTZ", # flake8-datetimez
-    "E", # pycodestyle error
-    "EM", # flake8-errmsg
-    "F", # pyflakes
-    "FA", # flake8-future-annotations
-    "FBT", # flake8-boolean-trap
-    "FLY", # flake8-flynt
-    "G", # flake8-logging-format
-    "I", # isort
-    "ICN", # flake8-import-conventions
-    "INT", # flake8-gettext
-    "ISC", # isort-comprehensions
-    "PERF", # flake8-perf
-    "PGH", # pygrep-hooks
-    "PIE", # flake8-pie
-    "PL", # pylint
-    "PT", # flake8-pytest-style
-    "PTH", # flake8-use-pathlib
-    "PYI", # flake8-pyi
-    "Q", # flake8-quotes
-    "RET", # flake8-return
-    "RSE", # flake8-rst-docstrings
-    "RUF", # ruff
-    "S", # flake8-bandit
-    "SIM", # flake8-simplify
-    "SLF", # flake8-self
-    "SLOT", # flake8-slots
-    "T10", # flake8-debugger
-    "T20", # flake8-print
-    "TC", # flake8-type-checking
-    "TID", # flake8-tidy-imports
-    "TRY", # tryceratops
-    "UP", # pyupgrade
-    "W", # pycodestyle warning
-    "YTT", # flake8-2020
-]
+select = [ "ALL",]
 ignore = [
+    "C90", # McCabe complexity
     "COM812", # Messes with the formatter
-    "ISC001", # Messes with the formatter
+    "FIX002", # Line contains TODO
     "PERF203", # Rarely useful
-    "PLR09", # Too many something (args, statements, etc)
-    "S112", # Rarely useful
+    "PLR09", # Too many something (arg, statements, etc)
     "RUF012", # Doesn't play well with Pydantic
     "TC001", # Doesn't play well with Pydantic
     "TC002", # Doesn't play well with Pydantic
     "TC003", # Doesn't play well with Pydantic
-    "UP007", # pyupgrade: non-pep604-annotation-union
+    "TD002", # Missing author in TODO
+    "TD003", # Missing issue link in TODO

     # TODO rules
+    "ANN401",
+    "BLE",
     "D100", # pydocstyle: missing docstring in public module
     "D104", # pydocstyle: missing docstring in public package
+    "D2",
+    "D3",
+    "D4",
+    "ERA",
+    "N",
     "PLC0415", # pylint: import-outside-top-level
     "TRY301", # tryceratops: raise-within-try
 ]
-unfixable = ["B028"] # People should intentionally tune the stacklevel
+unfixable = [
+    "B028", # People should intentionally tune the stacklevel
+    "PLW1510", # People should intentionally set the check argument
+]

+flake8-annotations.allow-star-arg-any = true
+flake8-annotations.mypy-init-return = true
+flake8-type-checking.runtime-evaluated-base-classes = ["pydantic.BaseModel","langchain_core.load.serializable.Serializable","langchain_core.runnables.base.RunnableSerializable"]
+pep8-naming.classmethod-decorators = [ "classmethod", "langchain_core.utils.pydantic.pre_init", "pydantic.field_validator", "pydantic.v1.root_validator",]
 pydocstyle.convention = "google"
 pyupgrade.keep-runtime-typing = true

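The two unfixable rules are easier to read with a short illustration (a sketch of the checks as I understand them, not taken from the diff):

    import subprocess
    import warnings

    # PLW1510: subprocess.run should state `check` explicitly, so a failing command
    # either raises or is knowingly ignored; marking it unfixable keeps that a human choice.
    subprocess.run(["git", "status"], check=True)

    # B028: warnings.warn should pass `stacklevel` so the warning points at the caller
    # rather than at this line; the right value is a judgment call, not an autofix.
    warnings.warn("deprecated", DeprecationWarning, stacklevel=2)
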
@@ -218,6 +190,13 @@ pyupgrade.keep-runtime-typing = true
     "SLF001", # Private member access in tests
     "PLR2004", # Magic value comparisons
 ]
+"tests/integration_tests/examples/*.py" = [
+    "INP001", # Not a package
+    "EXE001", # Only examples
+]
+"scripts/*.py" = [
+    "INP001", # Not a package
+]
 "langchain/chains/constitutional_ai/principles.py" = [
     "E501", # Line too long
 ]

new file (empty): libs/langchain/tests/unit_tests/_api/__init__.py

@@ -229,8 +229,8 @@ def test_agent_iterator_properties_and_setters() -> None:

 assert isinstance(agent_iter, AgentExecutorIterator)
 assert isinstance(agent_iter.inputs, dict)
-assert isinstance(agent_iter.callbacks, type(None))
-assert isinstance(agent_iter.tags, type(None))
+assert agent_iter.callbacks is None
+assert agent_iter.tags is None
 assert isinstance(agent_iter.agent_executor, AgentExecutor)

 agent_iter.inputs = "New input" # type: ignore[assignment]

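The rewritten assertions use the idiomatic identity check; a tiny before/after:

    value = None
    assert isinstance(value, type(None))  # works, but hides the intent behind a type check
    assert value is None                  # what the test now asserts directly
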
@@ -22,12 +22,12 @@ class FakeEmbeddings(Embeddings):
 @override
 def embed_documents(self, texts: list[str]) -> list[list[float]]:
     """Return random floats."""
-    return [list(np.random.uniform(0, 1, 10)) for _ in range(10)]
+    return [list(np.random.default_rng().uniform(0, 1, 10)) for _ in range(10)]

 @override
 def embed_query(self, text: str) -> list[float]:
     """Return random floats."""
-    return list(np.random.uniform(0, 1, 10))
+    return list(np.random.default_rng().uniform(0, 1, 10))


 class FakeLLM(BaseLLM):
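The fakes move from the legacy global np.random functions to NumPy's Generator API (ruff's numpy-legacy-random check, NPY002); a quick sketch:

    import numpy as np

    rng = np.random.default_rng(seed=0)  # seedable, self-contained random state
    rng.uniform(0, 1, 10)                # same call surface as np.random.uniform

    # Unseeded, as in the fakes above; fine for tests that only need "some floats".
    np.random.default_rng().uniform(0, 1, 10)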