Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-18 08:03:36 +00:00)
community[patch]: add NotebookLoader unit test (#17721)
Thank you for contributing to LangChain!

- **Description:** added unit tests for NotebookLoader. Linked PR: https://github.com/langchain-ai/langchain/pull/17614
- **Issue:** [#17614](https://github.com/langchain-ai/langchain/pull/17614)
- **Twitter handle:** @paulodoestech
- [x] Pass lint and test: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified to check that you're passing lint and testing. See contribution guidelines for more information on how to write/run tests, lint, etc: https://python.langchain.com/docs/contributing/
- [x] Add tests and docs: If you're adding a new integration, please include
  1. a test for the integration, preferably unit tests that do not rely on network access,
  2. an example notebook showing its use. It lives in `docs/docs/integrations` directory.

If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, hwchase17.

---------

Co-authored-by: lachiewalker <lachiewalker1@hotmail.com>
Co-authored-by: Bagatur <22008038+baskaryan@users.noreply.github.com>
Co-authored-by: Bagatur <baskaryan@gmail.com>
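Since the change described above is a unit test for NotebookLoader, here is a minimal sketch of what such a test can look like. It is not the PR's actual test file; the fixture notebook contents, test name, and assertions are illustrative.

import json
from pathlib import Path

from langchain_community.document_loaders import NotebookLoader


def test_notebook_loader_reads_cells(tmp_path: Path) -> None:
    # Build a tiny .ipynb on disk so the test needs no network access or fixture files.
    notebook = {
        "cells": [
            {"cell_type": "markdown", "metadata": {}, "source": ["# Hello notebook"]},
            {
                "cell_type": "code",
                "execution_count": 1,
                "metadata": {},
                "outputs": [],
                "source": ["print('hi')"],
            },
        ],
        "metadata": {},
        "nbformat": 4,
        "nbformat_minor": 5,
    }
    path = tmp_path / "example.ipynb"
    path.write_text(json.dumps(notebook), encoding="utf8")

    docs = NotebookLoader(str(path)).load()

    # One Document per notebook, with the file path recorded as its source.
    assert len(docs) == 1
    assert docs[0].metadata["source"] == str(path)
    # Cell sources should survive into the concatenated page content.
    assert "# Hello notebook" in docs[0].page_content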
@@ -25,7 +25,11 @@ def concatenate_cells(
     """
     cell_type = cell["cell_type"]
     source = cell["source"]
-    output = cell["outputs"]
+    if include_outputs:
+        try:
+            output = cell["outputs"]
+        except KeyError:
+            pass

     if include_outputs and cell_type == "code" and output:
         if "ename" in output[0].keys():
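The try/except added above guards cells that have no "outputs" field: in .ipynb JSON, only code cells carry an "outputs" key, markdown cells do not. A small illustration of that shape and of the guarded lookup (cell contents are made up):

markdown_cell = {"cell_type": "markdown", "source": ["# Notes"]}        # no "outputs" key
code_cell = {"cell_type": "code", "source": ["1 + 1"], "outputs": []}   # has "outputs"

for cell in (markdown_cell, code_cell):
    try:
        outputs = cell["outputs"]  # same guarded lookup as in the hunk above
    except KeyError:
        outputs = []
    print(cell["cell_type"], outputs)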
@@ -58,14 +62,13 @@ def concatenate_cells(

 def remove_newlines(x: Any) -> Any:
     """Recursively remove newlines, no matter the data structure they are stored in."""
-    import pandas as pd

     if isinstance(x, str):
         return x.replace("\n", "")
     elif isinstance(x, list):
         return [remove_newlines(elem) for elem in x]
-    elif isinstance(x, pd.DataFrame):
-        return x.applymap(remove_newlines)
+    elif isinstance(x, dict):
+        return {k: remove_newlines(v) for (k, v) in x.items()}
     else:
         return x

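With the pandas.DataFrame branch replaced by a dict branch, remove_newlines can recurse through the plain per-cell dicts that load() now builds (see the next hunk). An illustrative call, assuming the function is imported from the module this diff touches; the cell contents are made up:

from langchain_community.document_loaders.notebook import remove_newlines

cell = {
    "cell_type": "markdown",
    "source": ["# Title\n", "Some text\n"],
    "outputs": [],
}
cleaned = remove_newlines(cell)
print(cleaned["source"])  # ['# Title', 'Some text'] -- newlines stripped at every level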
@@ -104,29 +107,29 @@ class NotebookLoader(BaseLoader):
         self,
     ) -> List[Document]:
         """Load documents."""
-        try:
-            import pandas as pd
-        except ImportError:
-            raise ImportError(
-                "pandas is needed for Notebook Loader, "
-                "please install with `pip install pandas`"
-            )
         p = Path(self.file_path)

         with open(p, encoding="utf8") as f:
             d = json.load(f)

-        data = pd.json_normalize(d["cells"])
-        filtered_data = data[["cell_type", "source", "outputs"]]
-        if self.remove_newline:
-            filtered_data = filtered_data.applymap(remove_newlines)
+        filtered_data = [
+            {k: v for (k, v) in cell.items() if k in ["cell_type", "source", "outputs"]}
+            for cell in d["cells"]
+        ]

-        text = filtered_data.apply(
-            lambda x: concatenate_cells(
-                x, self.include_outputs, self.max_output_length, self.traceback
-            ),
-            axis=1,
-        ).str.cat(sep=" ")
+        if self.remove_newline:
+            filtered_data = list(map(remove_newlines, filtered_data))
+
+        text = "".join(
+            list(
+                map(
+                    lambda x: concatenate_cells(
+                        x, self.include_outputs, self.max_output_length, self.traceback
+                    ),
+                    filtered_data,
+                )
+            )
+        )

         metadata = {"source": str(p)}
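For context, a minimal usage sketch of the loader whose load() method is rewritten above; the notebook path and option values are illustrative, and after this change no pandas install is required:

from langchain_community.document_loaders import NotebookLoader

loader = NotebookLoader(
    "example_data/notebook.ipynb",  # illustrative path
    include_outputs=True,           # append cell outputs to the text
    max_output_length=20,           # truncate long outputs
    remove_newline=True,            # strip newlines from cell sources/outputs
)
docs = loader.load()
print(docs[0].metadata["source"])   # the notebook's path
print(docs[0].page_content[:200])   # concatenated cell text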