Mirror of https://github.com/hwchase17/langchain.git
synced 2025-07-10 15:06:18 +00:00
langchain: Add ruff rules PIE (#31880)
All auto-fixes. See https://docs.astral.sh/ruff/rules/#flake8-pie-pie
This commit is contained in:
parent a46a2b8bda
commit 53c75abba2
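The auto-fixes in this diff fall into three flake8-pie patterns, which appear to be PIE800 (unnecessary dict spread), PIE790 (unnecessary `pass` after a docstring), and PIE810 (repeated `startswith` calls). A minimal before/after sketch of the three patterns, illustrative only and not code from the repository:

# PIE800: a dict literal spread into another dict literal can be inlined.
merged_before = {**{"a": 1}, "b": 2}
merged_after = {"a": 1, "b": 2}
assert merged_before == merged_after

# PIE790: a docstring is already a complete body; the trailing `pass` is dead code.
def noop_before() -> None:
    """Do nothing."""
    pass

def noop_after() -> None:
    """Do nothing."""

# PIE810: str.startswith accepts a tuple of prefixes.
col = "inputs.question"
assert (col.startswith("inputs.") or col.startswith("outputs.")) == col.startswith(("inputs.", "outputs."))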
@@ -449,7 +449,7 @@ class RunnableAgent(BaseSingleActionAgent):
         Returns:
             Action specifying what tool to use.
         """
-        inputs = {**kwargs, **{"intermediate_steps": intermediate_steps}}
+        inputs = {**kwargs, "intermediate_steps": intermediate_steps}
         final_output: Any = None
         if self.stream_runnable:
             # Use streaming to make sure that the underlying LLM is invoked in a
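The line changed in this hunk is the PIE800 pattern: `**{...}` spreads a freshly built dict literal whose key could simply be written inline. Both spellings build the same mapping; a quick sketch with hypothetical values (not the agent's real inputs):

kwargs = {"input": "What is 2 + 2?"}
intermediate_steps: list = []

old_style = {**kwargs, **{"intermediate_steps": intermediate_steps}}
new_style = {**kwargs, "intermediate_steps": intermediate_steps}

# Same result; the old form just builds and immediately discards an inner dict.
assert old_style == new_style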
@@ -488,7 +488,7 @@ class RunnableAgent(BaseSingleActionAgent):
         Returns:
             Action specifying what tool to use.
         """
-        inputs = {**kwargs, **{"intermediate_steps": intermediate_steps}}
+        inputs = {**kwargs, "intermediate_steps": intermediate_steps}
         final_output: Any = None
         if self.stream_runnable:
             # Use streaming to make sure that the underlying LLM is invoked in a
@@ -565,7 +565,7 @@ class RunnableMultiActionAgent(BaseMultiActionAgent):
         Returns:
             Action specifying what tool to use.
         """
-        inputs = {**kwargs, **{"intermediate_steps": intermediate_steps}}
+        inputs = {**kwargs, "intermediate_steps": intermediate_steps}
         final_output: Any = None
         if self.stream_runnable:
             # Use streaming to make sure that the underlying LLM is invoked in a
@@ -604,7 +604,7 @@ class RunnableMultiActionAgent(BaseMultiActionAgent):
         Returns:
             Action specifying what tool to use.
         """
-        inputs = {**kwargs, **{"intermediate_steps": intermediate_steps}}
+        inputs = {**kwargs, "intermediate_steps": intermediate_steps}
         final_output: Any = None
         if self.stream_runnable:
             # Use streaming to make sure that the underlying LLM is invoked in a
@@ -907,8 +907,6 @@ class Agent(BaseSingleActionAgent):
             tools: Tools to use.
         """

-        pass
-
     @classmethod
     @abstractmethod
     def _get_default_output_parser(cls, **kwargs: Any) -> AgentOutputParser:
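This hunk and the memory/entity-store hunks below are the PIE790 fix: a docstring already constitutes a full method body, so a `pass` after it is redundant, and an abstract method stays abstract either way. A minimal sketch with a hypothetical ABC (not the LangChain class):

from abc import ABC, abstractmethod
from typing import Optional


class KeyValueStore(ABC):
    @abstractmethod
    def get(self, key: str) -> Optional[str]:
        """Get a value from the store."""
        # No `pass` needed: the docstring is the entire body.


assert "get" in KeyValueStore.__abstractmethods__  # still abstract without `pass`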
@@ -270,7 +270,7 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain):
         """
         map_results = await self.llm_chain.aapply(
             # FYI - this is parallelized and so it is fast.
-            [{**{self.document_variable_name: d.page_content}, **kwargs} for d in docs],
+            [{self.document_variable_name: d.page_content, **kwargs} for d in docs],
             callbacks=callbacks,
         )
         question_result_key = self.llm_chain.output_key
@@ -182,7 +182,7 @@ class MapRerankDocumentsChain(BaseCombineDocumentsChain):
         """
         results = self.llm_chain.apply_and_parse(
             # FYI - this is parallelized and so it is fast.
-            [{**{self.document_variable_name: d.page_content}, **kwargs} for d in docs],
+            [{self.document_variable_name: d.page_content, **kwargs} for d in docs],
             callbacks=callbacks,
         )
         return self._process_results(docs, results)
@@ -206,7 +206,7 @@ class MapRerankDocumentsChain(BaseCombineDocumentsChain):
         """
         results = await self.llm_chain.aapply_and_parse(
             # FYI - this is parallelized and so it is fast.
-            [{**{self.document_variable_name: d.page_content}, **kwargs} for d in docs],
+            [{self.document_variable_name: d.page_content, **kwargs} for d in docs],
             callbacks=callbacks,
         )
         return self._process_results(docs, results)
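The same PIE800 rewrite applies inside these list comprehensions. Note that `**kwargs` stays last in both spellings, so key-collision behavior is unchanged: a kwarg with the same name as the document variable wins before and after the fix. A small equivalence check with made-up documents and names (not the chain's real data):

docs = [{"page_content": "LangChain docs"}, {"page_content": "Ruff docs"}]
document_variable_name = "context"
kwargs = {"question": "What changed?"}

old = [{**{document_variable_name: d["page_content"]}, **kwargs} for d in docs]
new = [{document_variable_name: d["page_content"], **kwargs} for d in docs]

assert old == new
assert new[0] == {"context": "LangChain docs", "question": "What changed?"}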
@@ -40,27 +40,22 @@ class BaseEntityStore(BaseModel, ABC):
     @abstractmethod
     def get(self, key: str, default: Optional[str] = None) -> Optional[str]:
         """Get entity value from store."""
-        pass

     @abstractmethod
     def set(self, key: str, value: Optional[str]) -> None:
         """Set entity value in store."""
-        pass

     @abstractmethod
     def delete(self, key: str) -> None:
         """Delete entity value from store."""
-        pass

     @abstractmethod
     def exists(self, key: str) -> bool:
         """Check if entity exists in store."""
-        pass

     @abstractmethod
     def clear(self) -> None:
         """Delete all entities from store."""
-        pass


 @deprecated(
@@ -19,8 +19,6 @@ class ReadOnlySharedMemory(BaseMemory):

     def save_context(self, inputs: dict[str, Any], outputs: dict[str, str]) -> None:
         """Nothing should be saved or changed"""
-        pass

     def clear(self) -> None:
         """Nothing to clear, got a memory like a vault."""
-        pass
@@ -19,8 +19,6 @@ class SimpleMemory(BaseMemory):

     def save_context(self, inputs: dict[str, Any], outputs: dict[str, str]) -> None:
         """Nothing should be saved or changed, my memory is set in stone."""
-        pass

     def clear(self) -> None:
         """Nothing to clear, got a memory like a vault."""
-        pass
@@ -98,10 +98,8 @@ class TestResult(dict):
         to_drop = [
             col
             for col in df.columns
-            if col.startswith("inputs.")
-            or col.startswith("outputs.")
+            if col.startswith(("inputs.", "outputs.", "reference"))
             or col in {"input", "output"}
-            or col.startswith("reference")
         ]
         return df.describe(include="all").drop(to_drop, axis=1)

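This one is the PIE810 pattern: `str.startswith` accepts a tuple of prefixes, so the chained `or` calls collapse into a single call with identical results. A quick check against hypothetical column names (not taken from a real TestResult dataframe):

columns = ["inputs.question", "outputs.answer", "reference", "feedback.score", "input"]

chained = [
    c
    for c in columns
    if c.startswith("inputs.")
    or c.startswith("outputs.")
    or c in {"input", "output"}
    or c.startswith("reference")
]
tupled = [
    c
    for c in columns
    if c.startswith(("inputs.", "outputs.", "reference")) or c in {"input", "output"}
]

assert chained == tupled == ["inputs.question", "outputs.answer", "reference", "input"]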
@@ -143,7 +143,7 @@ ignore-regex = ".*(Stati Uniti|Tense=Pres).*"
 ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin"

 [tool.ruff.lint]
-select = ["E", "F", "I", "PGH003", "T201", "D", "UP", "S", "W"]
+select = ["E", "F", "I", "PGH003", "PIE", "T201", "D", "UP", "S", "W"]
 pydocstyle.convention = "google"
 pyupgrade.keep-runtime-typing = true

@@ -4,4 +4,3 @@ import pytest
 @pytest.mark.compile
 def test_placeholder() -> None:
     """Used for compiling integration tests without running any real tests."""
-    pass