From 444c2a3d9f2015f5e489112d051a48c9c5f07e4a Mon Sep 17 00:00:00 2001
From: arpitkumar980 <158285661+arpitkumar980@users.noreply.github.com>
Date: Thu, 23 May 2024 03:38:31 +0530
Subject: [PATCH] community[patch]: sharepoint loader identity enabled (#21176)

- **Description:** Enables identity-aware loading in `SharePointLoader`. Adds
  the `load_auth`, `token_path`, `file_id`, and `site_id` fields together with
  an `authorized_identities()` helper that calls the Microsoft Graph
  permissions endpoint for the loaded file and attaches the returned group and
  user display names to each parsed document's metadata under
  `authorized_identities`.

---------

Co-authored-by: Harrison Chase
Co-authored-by: Bagatur
Co-authored-by: Bagatur <22008038+baskaryan@users.noreply.github.com>
---
 .../document_loaders/sharepoint.py | 57 ++++++++++++++++++-
 1 file changed, 54 insertions(+), 3 deletions(-)

diff --git a/libs/community/langchain_community/document_loaders/sharepoint.py b/libs/community/langchain_community/document_loaders/sharepoint.py
index f4d57d66d42..bfcc47fba1b 100644
--- a/libs/community/langchain_community/document_loaders/sharepoint.py
+++ b/libs/community/langchain_community/document_loaders/sharepoint.py
@@ -1,8 +1,12 @@
 """Loader that loads data from Sharepoint Document Library"""
 from __future__ import annotations
 
-from typing import Iterator, List, Optional, Sequence
+import json
+from pathlib import Path
+from typing import Any, Iterator, List, Optional, Sequence
 
+import requests
+from langchain_core.document_loaders import BaseLoader
 from langchain_core.documents import Document
 from langchain_core.pydantic_v1 import Field
 
@@ -13,7 +17,7 @@ from langchain_community.document_loaders.base_o365 import (
 from langchain_community.document_loaders.parsers.registry import get_parser
 
 
-class SharePointLoader(O365BaseLoader):
+class SharePointLoader(O365BaseLoader, BaseLoader):
     """Load from `SharePoint`."""
 
     document_library_id: str = Field(...)
@@ -24,6 +28,14 @@ class SharePointLoader(O365BaseLoader):
     """ The IDs of the objects to load data from."""
     folder_id: Optional[str] = None
     """ The ID of the folder to load data from."""
+    load_auth: Optional[bool] = False
+    """ Whether to load authorization identities."""
+    token_path: Path = Path.home() / ".credentials" / "o365_token.txt"
+    """ The path to the token to make api calls"""
+    file_id: Optional[str] = None
+    """ The ID of the file for which we need auth identities"""
+    site_id: Optional[str] = None
+    """ The ID of the Sharepoint site of the user where the file is present """
 
     @property
     def _file_types(self) -> Sequence[_FileType]:
@@ -52,7 +64,10 @@ class SharePointLoader(O365BaseLoader):
             if not isinstance(target_folder, Folder):
                 raise ValueError(f"There isn't a folder with path {self.folder_path}.")
             for blob in self._load_from_folder(target_folder):
-                yield from blob_parser.lazy_parse(blob)
+                for parsed_blob in blob_parser.lazy_parse(blob):
+                    auth_identities = self.authorized_identities()
+                    parsed_blob.metadata["authorized_identities"] = auth_identities
+                    yield parsed_blob
         if self.folder_id:
             target_folder = drive.get_item(self.folder_id)
             if not isinstance(target_folder, Folder):
@@ -68,3 +83,39 @@ class SharePointLoader(O365BaseLoader):
                 raise ValueError("Unable to fetch root folder")
             for blob in self._load_from_folder(target_folder):
                 yield from blob_parser.lazy_parse(blob)
+
+    def authorized_identities(self) -> List:
+        data = self._fetch_access_token()
+        access_token = data.get("access_token")
+        url = (
+            f"https://graph.microsoft.com/v1.0/sites/{self.site_id}/"
+            f"drives/{self.document_library_id}/items/{self.file_id}/permissions"
+        )
+        headers = {"Authorization": f"Bearer {access_token}"}
+        response = requests.request("GET", url, headers=headers, data={})
+        groups_list = response.json()
+
+        group_names = []
+
+        for group_data in groups_list.get("value"):
+            if group_data.get("grantedToV2"):
+                if group_data.get("grantedToV2").get("siteGroup"):
+                    site_data = group_data.get("grantedToV2").get("siteGroup")
+                    # print(group_data)
+                    group_names.append(site_data.get("displayName"))
+                elif group_data.get("grantedToV2").get("group") or (
+                    group_data.get("grantedToV2").get("user")
+                ):
+                    site_data = group_data.get("grantedToV2").get("group") or (
+                        group_data.get("grantedToV2").get("user")
+                    )
+                    # print(group_data)
+                    group_names.append(site_data.get("displayName"))
+
+        return group_names
+
+    def _fetch_access_token(self) -> Any:
+        with open(self.token_path) as f:
+            s = f.read()
+        data = json.loads(s)
+        return data
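For reviewers, a minimal usage sketch of the new identity-loading fields. This is not part of the patch: the IDs and folder path are placeholders, and it assumes the usual O365 authentication has already been completed so that the cached token referenced by `token_path` exists (`load()` is available through the newly added `BaseLoader` base class).

```python
from langchain_community.document_loaders.sharepoint import SharePointLoader

# Placeholder IDs -- substitute real values from your SharePoint tenant.
loader = SharePointLoader(
    document_library_id="<document-library-id>",  # placeholder
    folder_path="/shared/reports",                # placeholder; load via folder_path
    load_auth=True,                               # new field added by this patch
    site_id="<sharepoint-site-id>",               # new: used in the Graph permissions URL
    file_id="<file-id>",                          # new: item whose permissions are read
)

docs = loader.load()
for doc in docs:
    # Each document's metadata now carries the group/user display names
    # returned by the Graph permissions endpoint.
    print(doc.metadata.get("authorized_identities"))
```

Note that in this revision `authorized_identities()` is invoked for every blob loaded through `folder_path`, irrespective of the `load_auth` flag, and `_fetch_access_token()` only reads the cached token from `token_path` (default `~/.credentials/o365_token.txt`) rather than refreshing it.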