Mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-19 21:33:51 +00:00)
core: Add ruff rules PTH (pathlib) (#29338)

See https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth

Co-authored-by: ccurme <chester.curme@gmail.com>

commit 9e6ffd1264 (parent 86b364de3b)
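The hunks below all follow the same migration pattern. For orientation, here is a minimal, self-contained sketch of the rewrites the flake8-use-pathlib (PTH) rules push toward; the file name is hypothetical and not taken from the diff.

from pathlib import Path

config_path = Path("settings.yaml")                   # hypothetical file, created only for the demo
config_path.write_text("key: value\n", encoding="utf-8")

# PTH123: builtins.open() -> Path.open()
with config_path.open(encoding="utf-8") as f:
    raw = f.read()

# Whole-file reads collapse into a single call (the shape of most hunks below)
assert config_path.read_text(encoding="utf-8") == raw

# String munging on paths -> Path attributes (and PTH207 swaps glob.glob for Path.glob)
assert config_path.suffix == ".yaml"
assert config_path.stem == "settings"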
@@ -2,6 +2,7 @@

 from __future__ import annotations

+from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional, TextIO, cast

 from langchain_core.callbacks import BaseCallbackHandler
@@ -30,7 +31,7 @@ class FileCallbackHandler(BaseCallbackHandler):
             mode: The mode to open the file in. Defaults to "a".
             color: The color to use for the text. Defaults to None.
         """
-        self.file = cast(TextIO, open(filename, mode, encoding="utf-8"))  # noqa: SIM115
+        self.file = cast(TextIO, Path(filename).open(mode, encoding="utf-8"))  # noqa: SIM115
         self.color = color

     def __del__(self) -> None:
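This hunk is the one place where the open() call is kept as a long-lived handle rather than collapsed into read_text()/write_text(): the handler holds the file open across callbacks and closes it in __del__, which is also why the SIM115 suppression survives. A minimal sketch of the same pattern, with a hypothetical file name:

from pathlib import Path

log_path = Path("handler.log")                   # hypothetical log file
log_file = log_path.open("a", encoding="utf-8")  # same handle open(log_path, "a", encoding="utf-8") would give
log_file.write("chain started\n")
log_file.close()                                 # the handler does this in __del__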
@@ -3,7 +3,7 @@ from __future__ import annotations
 import contextlib
 import mimetypes
 from io import BufferedReader, BytesIO
-from pathlib import PurePath
+from pathlib import Path, PurePath
 from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast

 from pydantic import ConfigDict, Field, field_validator, model_validator
@@ -151,8 +151,7 @@ class Blob(BaseMedia):
     def as_string(self) -> str:
         """Read data as a string."""
         if self.data is None and self.path:
-            with open(str(self.path), encoding=self.encoding) as f:
-                return f.read()
+            return Path(self.path).read_text(encoding=self.encoding)
         elif isinstance(self.data, bytes):
             return self.data.decode(self.encoding)
         elif isinstance(self.data, str):
@@ -168,8 +167,7 @@ class Blob(BaseMedia):
         elif isinstance(self.data, str):
             return self.data.encode(self.encoding)
         elif self.data is None and self.path:
-            with open(str(self.path), "rb") as f:
-                return f.read()
+            return Path(self.path).read_bytes()
         else:
             msg = f"Unable to get bytes for blob {self}"
             raise ValueError(msg)
@@ -180,7 +178,7 @@
         if isinstance(self.data, bytes):
             yield BytesIO(self.data)
         elif self.data is None and self.path:
-            with open(str(self.path), "rb") as f:
+            with Path(self.path).open("rb") as f:
                 yield f
         else:
             msg = f"Unable to convert blob {self}"
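Taken together, the three Blob hunks map each access pattern onto the matching pathlib accessor: as_string() becomes read_text(), as_bytes() becomes read_bytes(), and as_bytes_io() keeps a real handle via Path.open("rb"). A small self-contained sketch of the three calls (the file name is hypothetical):

from pathlib import Path

blob_path = Path("example_blob.txt")             # hypothetical path
blob_path.write_text("hello", encoding="utf-8")

# as_string(): open(str(p)) + f.read()  ->  Path(p).read_text(encoding=...)
assert blob_path.read_text(encoding="utf-8") == "hello"

# as_bytes(): open(p, "rb") + f.read()  ->  Path(p).read_bytes()
assert blob_path.read_bytes() == b"hello"

# as_bytes_io(): still needs a live file object, so Path.open("rb") is used
with blob_path.open("rb") as f:
    assert f.read(2) == b"he"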
@@ -1402,7 +1402,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             llm.save(file_path="path/llm.yaml")
         """
         # Convert file to Path object.
-        save_path = Path(file_path) if isinstance(file_path, str) else file_path
+        save_path = Path(file_path)

         directory_path = save_path.parent
         directory_path.mkdir(parents=True, exist_ok=True)
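The isinstance branch disappears here (and in the BasePromptTemplate and prompt-loading hunks below) because Path() accepts both str and Path and returns an equivalent Path either way. A quick illustration:

from pathlib import Path

# Path() normalizes both inputs, so "Path(x) if isinstance(x, str) else x" is redundant.
assert Path("path/llm.yaml") == Path(Path("path/llm.yaml"))
assert isinstance(Path(Path("path/llm.yaml")), Path)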
@@ -1411,10 +1411,10 @@ class BaseLLM(BaseLanguageModel[str], ABC):
         prompt_dict = self.dict()

         if save_path.suffix == ".json":
-            with open(file_path, "w") as f:
+            with save_path.open("w") as f:
                 json.dump(prompt_dict, f, indent=4)
         elif save_path.suffix.endswith((".yaml", ".yml")):
-            with open(file_path, "w") as f:
+            with save_path.open("w") as f:
                 yaml.dump(prompt_dict, f, default_flow_style=False)
         else:
             msg = f"{save_path} must be json or yaml"
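Opening through save_path.open("w") also stops mixing the original file_path argument back into the write. A minimal sketch of the JSON branch, assuming a plain-dict payload (names and paths are illustrative):

import json
from pathlib import Path

prompt_dict = {"_type": "fake", "n": 1}          # hypothetical serialized object
save_path = Path("out/llm.json")                 # hypothetical target path
save_path.parent.mkdir(parents=True, exist_ok=True)

if save_path.suffix == ".json":
    with save_path.open("w") as f:               # was: open(file_path, "w")
        json.dump(prompt_dict, f, indent=4)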
@@ -368,16 +368,16 @@ class BasePromptTemplate(
             raise NotImplementedError(msg)

         # Convert file to Path object.
-        save_path = Path(file_path) if isinstance(file_path, str) else file_path
+        save_path = Path(file_path)

         directory_path = save_path.parent
         directory_path.mkdir(parents=True, exist_ok=True)

         if save_path.suffix == ".json":
-            with open(file_path, "w") as f:
+            with save_path.open("w") as f:
                 json.dump(prompt_dict, f, indent=4)
         elif save_path.suffix.endswith((".yaml", ".yml")):
-            with open(file_path, "w") as f:
+            with save_path.open("w") as f:
                 yaml.dump(prompt_dict, f, default_flow_style=False)
         else:
             msg = f"{save_path} must be json or yaml"
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
+from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     Annotated,
@@ -48,7 +49,6 @@ from langchain_core.utils.interactive_env import is_interactive_env

 if TYPE_CHECKING:
     from collections.abc import Sequence
-    from pathlib import Path


 class BaseMessagePromptTemplate(Serializable, ABC):
@@ -599,8 +599,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         Returns:
             A new instance of this class.
         """
-        with open(str(template_file)) as f:
-            template = f.read()
+        template = Path(template_file).read_text()
         return cls.from_template(template, input_variables=input_variables, **kwargs)

     def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
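The three chat-prompt hunks belong together: Path is now called at runtime in from_template_file, so its import moves out of the if TYPE_CHECKING block and up to module level. A small sketch of the distinction (the function and names are made up for illustration):

from __future__ import annotations

from pathlib import Path                 # needed at runtime: Path(...) is actually called
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Names used only inside annotations can stay behind the guard.
    from collections.abc import Sequence


def first_template(template_files: Sequence[Path | str]) -> str:
    return Path(template_files[0]).read_text()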
@@ -53,8 +53,7 @@ def _load_template(var_name: str, config: dict) -> dict:
     template_path = Path(config.pop(f"{var_name}_path"))
     # Load the template.
     if template_path.suffix == ".txt":
-        with open(template_path) as f:
-            template = f.read()
+        template = template_path.read_text()
     else:
         raise ValueError
     # Set the template variable to the extracted variable.
@@ -67,10 +66,11 @@ def _load_examples(config: dict) -> dict:
     if isinstance(config["examples"], list):
         pass
     elif isinstance(config["examples"], str):
-        with open(config["examples"]) as f:
-            if config["examples"].endswith(".json"):
+        path = Path(config["examples"])
+        with path.open() as f:
+            if path.suffix == ".json":
                 examples = json.load(f)
-            elif config["examples"].endswith((".yaml", ".yml")):
+            elif path.suffix in {".yaml", ".yml"}:
                 examples = yaml.safe_load(f)
             else:
                 msg = "Invalid file format. Only json or yaml formats are supported."
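Switching from str.endswith to Path.suffix is a slight semantic tightening: suffix is only the final extension, including the leading dot, which is why a set membership test replaces the endswith tuple. For example:

from pathlib import Path

assert Path("examples.yml").suffix == ".yml"               # leading dot included
assert Path("examples.yml").suffix in {".yaml", ".yml"}    # replaces endswith((".yaml", ".yml"))
assert Path("examples.json").suffix == ".json"
assert Path("archive.tar.gz").suffix == ".gz"              # only the last extension counts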
@@ -168,13 +168,13 @@ def _load_prompt_from_file(
 ) -> BasePromptTemplate:
     """Load prompt from file."""
     # Convert file to a Path object.
-    file_path = Path(file) if isinstance(file, str) else file
+    file_path = Path(file)
     # Load from either json or yaml.
     if file_path.suffix == ".json":
-        with open(file_path, encoding=encoding) as f:
+        with file_path.open(encoding=encoding) as f:
             config = json.load(f)
     elif file_path.suffix.endswith((".yaml", ".yml")):
-        with open(file_path, encoding=encoding) as f:
+        with file_path.open(encoding=encoding) as f:
             config = yaml.safe_load(f)
     else:
         msg = f"Got unsupported file type {file_path.suffix}"
@@ -3,6 +3,7 @@
 from __future__ import annotations

 import warnings
+from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional, Union

 from pydantic import BaseModel, model_validator
@@ -17,8 +18,6 @@ from langchain_core.prompts.string import (
 )

 if TYPE_CHECKING:
-    from pathlib import Path
-
     from langchain_core.runnables.config import RunnableConfig


@@ -238,8 +237,7 @@ class PromptTemplate(StringPromptTemplate):
         Returns:
             The prompt loaded from the file.
         """
-        with open(str(template_file), encoding=encoding) as f:
-            template = f.read()
+        template = Path(template_file).read_text(encoding=encoding)
         if input_variables:
             warnings.warn(
                 "`input_variables' is deprecated and ignored.",
@@ -2,6 +2,7 @@ import asyncio
 import base64
 import re
 from dataclasses import asdict
+from pathlib import Path
 from typing import Literal, Optional

 from langchain_core.runnables.graph import (
@@ -290,13 +291,9 @@ async def _render_mermaid_using_pyppeteer(
     img_bytes = await page.screenshot({"fullPage": False})
     await browser.close()

-    def write_to_file(path: str, bytes: bytes) -> None:
-        with open(path, "wb") as file:
-            file.write(bytes)
-
     if output_file_path is not None:
         await asyncio.get_event_loop().run_in_executor(
-            None, write_to_file, output_file_path, img_bytes
+            None, Path(output_file_path).write_bytes, img_bytes
         )

     return img_bytes
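The local write_to_file helper can be deleted because a bound method such as Path(...).write_bytes is itself a callable that run_in_executor can schedule. A runnable sketch of that pattern, with a hypothetical output path:

import asyncio
from pathlib import Path


async def save_bytes(img_bytes: bytes, output_file_path: str) -> None:
    # The bound method Path(...).write_bytes replaces a hand-rolled write_to_file().
    await asyncio.get_event_loop().run_in_executor(
        None, Path(output_file_path).write_bytes, img_bytes
    )


asyncio.run(save_bytes(b"\x89PNG\r\n", "diagram.png"))   # hypothetical output file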
@@ -337,8 +334,7 @@ def _render_mermaid_using_api(
     if response.status_code == 200:
         img_bytes = response.content
         if output_file_path is not None:
-            with open(output_file_path, "wb") as file:
-                file.write(response.content)
+            Path(output_file_path).write_bytes(response.content)

         return img_bytes
     else:
@@ -77,7 +77,7 @@ target-version = "py39"


 [tool.ruff.lint]
-select = [ "ANN", "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TC", "TID", "TRY", "UP", "W", "YTT",]
+select = [ "ANN", "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "PTH", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TC", "TID", "TRY", "UP", "W", "YTT",]
 ignore = [ "ANN401", "COM812", "UP007", "S110", "S112", "TC001", "TC002", "TC003"]
 flake8-type-checking.runtime-evaluated-base-classes = ["pydantic.BaseModel","langchain_core.load.serializable.Serializable","langchain_core.runnables.base.RunnableSerializable"]
 flake8-annotations.allow-star-arg-any = true
|
@ -354,7 +354,7 @@ def test_prompt_from_file_with_partial_variables() -> None:
|
|||||||
template = "This is a {foo} test {bar}."
|
template = "This is a {foo} test {bar}."
|
||||||
partial_variables = {"bar": "baz"}
|
partial_variables = {"bar": "baz"}
|
||||||
# when
|
# when
|
||||||
with mock.patch("builtins.open", mock.mock_open(read_data=template)):
|
with mock.patch("pathlib.Path.open", mock.mock_open(read_data=template)):
|
||||||
prompt = PromptTemplate.from_file(
|
prompt = PromptTemplate.from_file(
|
||||||
"mock_file_name", partial_variables=partial_variables
|
"mock_file_name", partial_variables=partial_variables
|
||||||
)
|
)
|
||||||
|
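Because from_file now reads through pathlib, the patch target in the test moves from builtins.open to pathlib.Path.open; Path.read_text() opens the file via Path.open() internally, so mock_open() still intercepts the read. A standalone sketch of that mechanism:

from pathlib import Path
from unittest import mock

template = "This is a {foo} test {bar}."
with mock.patch("pathlib.Path.open", mock.mock_open(read_data=template)):
    # read_text() goes through the patched Path.open(), so no real file is needed
    assert Path("mock_file_name").read_text() == template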
@@ -1,16 +1,13 @@
 import concurrent.futures
-import glob
 import importlib
 import subprocess
 from pathlib import Path


 def test_importable_all() -> None:
-    for path in glob.glob("../core/langchain_core/*"):
-        relative_path = Path(path).parts[-1]
-        if relative_path.endswith(".typed"):
-            continue
-        module_name = relative_path.split(".")[0]
-        module = importlib.import_module("langchain_core." + module_name)
-        all_ = getattr(module, "__all__", [])
-        for cls_ in all_:
+    for path in Path("../core/langchain_core/").glob("*"):
+        module_name = path.stem
+        if not module_name.startswith(".") and path.suffix != ".typed":
+            module = importlib.import_module("langchain_core." + module_name)
+            all_ = getattr(module, "__all__", [])
+            for cls_ in all_:
@@ -37,11 +34,9 @@ def test_importable_all_via_subprocess() -> None:
     for one sequence of imports but not another.
     """
     module_names = []
-    for path in glob.glob("../core/langchain_core/*"):
-        relative_path = Path(path).parts[-1]
-        if relative_path.endswith(".typed"):
-            continue
-        module_name = relative_path.split(".")[0]
-        module_names.append(module_name)
+    for path in Path("../core/langchain_core/").glob("*"):
+        module_name = path.stem
+        if not module_name.startswith(".") and path.suffix != ".typed":
+            module_names.append(module_name)

     with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
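In the import tests, glob.glob returned plain strings that had to be split by hand; Path.glob yields Path objects, so .stem and .suffix replace the string surgery. A self-contained sketch against a hypothetical package directory:

from pathlib import Path

pkg_dir = Path("demo_pkg")                   # hypothetical directory, created for the demo
pkg_dir.mkdir(exist_ok=True)
(pkg_dir / "messages.py").touch()
(pkg_dir / "py.typed").touch()

module_names = []
for path in pkg_dir.glob("*"):               # yields Path objects, not strings
    module_name = path.stem                  # "messages" and "py"
    if not module_name.startswith(".") and path.suffix != ".typed":
        module_names.append(module_name)

assert module_names == ["messages"]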