Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-12 15:59:56 +00:00)

ruff: restore stacklevels, disable autofixing (#31919)

parent cbb418b4bf
commit 71b361936d

@@ -101,6 +101,8 @@ ignore = [
     "PLC0415",
     "PLR2004",
 ]
+unfixable = ["PLW1510",]
+
 flake8-annotations.allow-star-arg-any = true
 flake8-annotations.mypy-init-return = true
 flake8-type-checking.runtime-evaluated-base-classes = ["pydantic.BaseModel","langchain_core.load.serializable.Serializable","langchain_core.runnables.base.RunnableSerializable"]

@@ -29,12 +29,12 @@ def _warn_on_import(name: str, replacement: Optional[str] = None) -> None:
         warnings.warn(
             f"Importing {name} from langchain root module is no longer supported. "
             f"Please use {replacement} instead.",
-            stacklevel=2,
+            stacklevel=3,
         )
     else:
         warnings.warn(
             f"Importing {name} from langchain root module is no longer supported.",
-            stacklevel=2,
+            stacklevel=3,
         )

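For context on the change above: the stacklevel argument tells warnings.warn how many stack frames to walk up when attributing the warning, so the reported file and line point at the user's code rather than at the internal helper that emits it. A minimal sketch of the mechanism, using hypothetical names rather than langchain's actual code:

import warnings


def _deprecated_helper() -> None:
    # stacklevel=1 would attribute the warning to this line,
    # stacklevel=2 to the caller of this helper (inside old_api),
    # stacklevel=3 to the user code that called old_api().
    warnings.warn("old_api is deprecated", DeprecationWarning, stacklevel=3)


def old_api() -> None:
    _deprecated_helper()


old_api()  # with stacklevel=3 the warning is reported against this line
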
@@ -255,7 +255,7 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC):
             warnings.warn(
                 "callback_manager is deprecated. Please use callbacks instead.",
                 DeprecationWarning,
-                stacklevel=2,
+                stacklevel=4,
             )
             values["callbacks"] = values.pop("callback_manager", None)
         return values

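The restored values are not uniform (3, 4, or 5 across the files below) because each warn sits at a different depth beneath the user's call, for example inside a validator that the framework reaches through its own wrapper frames, and every extra frame needs one more level. A rough, hypothetical sketch of that frame counting (not langchain's actual call chain):

import warnings


def _validate(values: dict) -> dict:
    # Two wrapper frames (framework hook + public constructor) separate the
    # user's call from this function, so stacklevel=4 is needed for the
    # warning to land on the user's line instead of on a wrapper.
    warnings.warn("callback_manager is deprecated", DeprecationWarning, stacklevel=4)
    return values


def _framework_hook(values: dict) -> dict:
    return _validate(values)


def make_chain(**values: object) -> dict:
    return _framework_hook(dict(values))


make_chain(callback_manager=object())  # warning is reported against this line
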
@@ -504,7 +504,7 @@ class ChatVectorDBChain(BaseConversationalRetrievalChain):
         warnings.warn(
             "`ChatVectorDBChain` is deprecated - "
             "please use `from langchain.chains import ConversationalRetrievalChain`",
-            stacklevel=2,
+            stacklevel=4,
         )
         return values

@@ -112,7 +112,7 @@ class LLMCheckerChain(Chain):
             "Directly instantiating an LLMCheckerChain with an llm is deprecated. "
             "Please instantiate with question_to_checked_assertions_chain "
             "or using the from_llm class method.",
-            stacklevel=2,
+            stacklevel=5,
         )
         if (
             "question_to_checked_assertions_chain" not in values

@@ -177,7 +177,7 @@ class LLMMathChain(Chain):
             "Directly instantiating an LLMMathChain with an llm is deprecated. "
             "Please instantiate with llm_chain argument or using the from_llm "
             "class method.",
-            stacklevel=2,
+            stacklevel=5,
         )
         if "llm_chain" not in values and values["llm"] is not None:
             prompt = values.get("prompt", PROMPT)

@@ -118,7 +118,7 @@ class LLMSummarizationCheckerChain(Chain):
             "Directly instantiating an LLMSummarizationCheckerChain with an llm is "
             "deprecated. Please instantiate with"
             " sequential_chain argument or using the from_llm class method.",
-            stacklevel=2,
+            stacklevel=5,
         )
         if "sequential_chain" not in values and values["llm"] is not None:
             values["sequential_chain"] = _load_sequential_chain(

@@ -74,7 +74,7 @@ class NatBotChain(Chain):
             "Directly instantiating an NatBotChain with an llm is deprecated. "
             "Please instantiate with llm_chain argument or using the from_llm "
             "class method.",
-            stacklevel=2,
+            stacklevel=5,
         )
         if "llm_chain" not in values and values["llm"] is not None:
             values["llm_chain"] = PROMPT | values["llm"] | StrOutputParser()

@@ -77,7 +77,7 @@ class VectorDBQAWithSourcesChain(BaseQAWithSourcesChain):
         warnings.warn(
             "`VectorDBQAWithSourcesChain` is deprecated - "
             "please use `from langchain.chains import RetrievalQAWithSourcesChain`",
-            stacklevel=2,
+            stacklevel=5,
         )
         return values

@@ -162,7 +162,7 @@ class QueryTransformer(Transformer):
         warnings.warn(
             "Dates are expected to be provided in ISO 8601 date format "
             "(YYYY-MM-DD).",
-            stacklevel=2,
+            stacklevel=3,
         )
         return {"date": item, "type": "date"}

@@ -124,12 +124,12 @@ class _EvalArgsMixin:
             msg = f"{self.__class__.__name__} requires an input string."
             raise ValueError(msg)
         if input_ is not None and not self.requires_input:
-            warn(self._skip_input_warning, stacklevel=2)
+            warn(self._skip_input_warning, stacklevel=3)
         if self.requires_reference and reference is None:
             msg = f"{self.__class__.__name__} requires a reference string."
             raise ValueError(msg)
         if reference is not None and not self.requires_reference:
-            warn(self._skip_reference_warning, stacklevel=2)
+            warn(self._skip_reference_warning, stacklevel=3)
 
 
 class StringEvaluator(_EvalArgsMixin, ABC):

@@ -183,7 +183,7 @@ def _get_in_memory_vectorstore() -> type[VectorStore]:
         "Using InMemoryVectorStore as the default vectorstore."
         "This memory store won't persist data. You should explicitly"
         "specify a vectorstore when using VectorstoreIndexCreator",
-        stacklevel=2,
+        stacklevel=3,
     )
     return InMemoryVectorStore

@@ -58,7 +58,7 @@ class BaseChatMemory(BaseMemory, ABC):
                 f"'{self.__class__.__name__}' got multiple output keys:"
                 f" {outputs.keys()}. The default 'output' key is being used."
                 f" If this is not desired, please manually set 'output_key'.",
-                stacklevel=2,
+                stacklevel=3,
             )
         else:
             msg = (

@@ -42,7 +42,7 @@ class CombinedMemory(BaseMemory):
                 "When using CombinedMemory, "
                 "input keys should be so the input is known. "
                 f" Was not set on {val}",
-                stacklevel=2,
+                stacklevel=5,
             )
         return value

@@ -198,6 +198,7 @@ ignore = [
     "SLF001", # Private member access
     "UP007", # pyupgrade: non-pep604-annotation-union
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 pydocstyle.convention = "google"
 pyupgrade.keep-runtime-typing = true

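For context on the config change above: B028 is the flake8-bugbear-derived rule that flags warnings.warn calls without an explicit stacklevel, and its suggested autofix (as I understand it) simply inserts stacklevel=2. Listing the rule under unfixable keeps the diagnostic while disabling that autofix, so the hand-tuned values restored in this commit are not mechanically flattened back to 2. A small illustrative sketch of the three cases:

import warnings

# Flagged by B028: no explicit stacklevel, so the warning is attributed to
# this library line rather than to the caller's code.
warnings.warn("old_api is deprecated", DeprecationWarning)

# What a mechanical fix would produce -- acceptable for a helper called
# directly, but too shallow when the warn sits under wrapper frames.
warnings.warn("old_api is deprecated", DeprecationWarning, stacklevel=2)

# The intent of the config change: violations are still reported, but the
# value is chosen by hand to match the real call depth.
warnings.warn("old_api is deprecated", DeprecationWarning, stacklevel=3)
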
@@ -114,6 +114,7 @@ ignore = [
     "UP045", # non-pep604-annotation-optional
     "SIM105", # Rarely useful
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]

@@ -109,6 +109,7 @@ ignore = [
     "D107", # Missing docstring in __init__
     "UP007", # pyupgrade: non-pep604-annotation-union
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]

@@ -95,6 +95,7 @@ ignore = [
     "D107", # Missing docstring in __init__
     "UP007", # pyupgrade: non-pep604-annotation-union
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]

@@ -100,6 +100,7 @@ ignore = [
     "SLF001", # Private member access
     "UP007", # pyupgrade: non-pep604-annotation-union
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]

@@ -104,6 +104,7 @@ ignore = [
     "SLF001", # Private member access
     "UP007", # pyupgrade: non-pep604-annotation-union
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]

@@ -97,6 +97,7 @@ ignore = [
     "SLF001", # Private member access
     "UP007", # pyupgrade: non-pep604-annotation-union
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]

@@ -112,6 +112,7 @@ ignore = [
     "SLF001", # Private member access
     "UP007", # pyupgrade: non-pep604-annotation-union
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]

@@ -104,6 +104,7 @@ ignore = [
     "UP007", # pyupgrade: non-pep604-annotation-union
     "UP045", # pyupgrade: non-pep604-annotation-optional
 ]
+unfixable = ["B028"] # People should intentionally tune the stacklevel
 
 [tool.coverage.run]
 omit = ["tests/*"]