Compare commits


2 Commits

Author | SHA1 | Message | Date
Harrison Chase | 4c3326becf | cr | 2023-09-16 22:59:28 -07:00
Harrison Chase | f9db904b12 | test for more import cycles | 2023-09-16 22:33:48 -07:00
330 changed files with 474 additions and 399 deletions
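The diffs below all follow one pattern: imports are redirected from `langchain.docstore.*`, `langchain.embeddings.base`, and `langchain.utilities.*` to `langchain.schema.*` and `langchain.utils.*`, or deferred under `TYPE_CHECKING` — the standard moves for breaking circular imports, which matches the "test for more import cycles" commit message. The test itself is not shown in this view; as a rough, hypothetical sketch of that kind of check (not the code added in the commit), a smoke test can simply import every submodule of the package and report anything that fails:

```python
# Hypothetical sketch of an import-cycle smoke test: import every submodule
# of a package so that any circular import surfaces as an ImportError.
import importlib
import pkgutil


def import_all_modules(package_name: str) -> list[str]:
    """Import every submodule of ``package_name``; return the failures."""
    package = importlib.import_module(package_name)
    failures = []
    for info in pkgutil.walk_packages(package.__path__, prefix=package_name + "."):
        try:
            importlib.import_module(info.name)
        except ImportError as exc:  # circular imports usually fail here
            failures.append(f"{info.name}: {exc}")
    return failures


if __name__ == "__main__":
    # Assumes the package under test (here: langchain) is installed.
    problems = import_all_modules("langchain")
    assert not problems, "Import failures:\n" + "\n".join(problems)
```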

View File

@@ -6,7 +6,7 @@ from typing import Any, Dict, List, Optional, Union
from langchain.callbacks.manager import CallbackManagerForChainRun
from langchain.chains.llm import LLMChain
from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS
from langchain.embeddings.base import Embeddings
from langchain.schema.embeddings import Embeddings
from langchain.prompts.prompt import PromptTemplate
from langchain.schema import BaseOutputParser, BasePromptTemplate
from langchain.schema.language_model import BaseLanguageModel

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
import importlib
from typing import (
TYPE_CHECKING,
Any,
AsyncIterator,
Dict,
@@ -15,7 +16,6 @@ from typing import (
from typing_extensions import Literal
from langchain.chat_loaders.base import ChatSession
from langchain.schema.messages import (
AIMessage,
AIMessageChunk,
@@ -27,6 +27,9 @@ from langchain.schema.messages import (
SystemMessage,
)
if TYPE_CHECKING:
from langchain.chat_loaders.base import ChatSession
async def aenumerate(
iterable: AsyncIterator[Any], start: int = 0
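The three hunks above fix a cycle by importing `ChatSession` only for type checking: `TYPE_CHECKING` is added to the `typing` import, the top-level `from langchain.chat_loaders.base import ChatSession` is removed, and the same import reappears under an `if TYPE_CHECKING:` guard, so the name remains available to annotations while the runtime import that evidently participates in a cycle is skipped. A minimal, self-contained sketch of that pattern, assuming nothing beyond the standard library at runtime:

```python
# Sketch of the TYPE_CHECKING pattern used in the diff above: the import is
# needed only for annotations, so deferring it avoids a runtime import cycle.
from __future__ import annotations

from typing import TYPE_CHECKING, List

if TYPE_CHECKING:
    # Evaluated only by static type checkers, never at runtime, so it cannot
    # participate in an import cycle.
    from langchain.chat_loaders.base import ChatSession


def session_count(sessions: List[ChatSession]) -> int:
    # With `from __future__ import annotations`, the annotation is a string
    # at runtime, so ChatSession does not need to be importable here.
    return len(sessions)
```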

View File

@@ -8,7 +8,7 @@ from langchain.chains.api.openapi.chain import OpenAPIEndpointChain
from langchain.schema.language_model import BaseLanguageModel
from langchain.tools.openapi.utils.api_models import APIOperation
from langchain.tools.openapi.utils.openapi_utils import OpenAPISpec
from langchain.utilities.requests import Requests
from langchain.utils.requests import Requests
class NLATool(Tool):

View File

@@ -9,7 +9,7 @@ from langchain.schema.language_model import BaseLanguageModel
from langchain.tools.base import BaseTool
from langchain.tools.openapi.utils.openapi_utils import OpenAPISpec
from langchain.tools.plugin import AIPlugin
from langchain.utilities.requests import Requests
from langchain.utils.requests import Requests
class NLAToolkit(BaseToolkit):

View File

@@ -37,7 +37,7 @@ from langchain.schema import BasePromptTemplate
from langchain.schema.language_model import BaseLanguageModel
from langchain.tools.base import BaseTool
from langchain.tools.requests.tool import BaseRequestsTool
from langchain.utilities.requests import RequestsWrapper
from langchain.utils.requests import RequestsWrapper
#
# Requests tools with LLM-instructed extraction of truncated responses.

View File

@@ -19,7 +19,7 @@ from langchain.tools.requests.tool import (
RequestsPostTool,
RequestsPutTool,
)
from langchain.utilities.requests import TextRequestsWrapper
from langchain.utils.requests import TextRequestsWrapper
class RequestsToolkit(BaseToolkit):

View File

@@ -12,7 +12,7 @@ from langchain.chains.api import news_docs, open_meteo_docs, podcast_docs, tmdb_
from langchain.chains.api.base import APIChain
from langchain.chains.llm_math.base import LLMMathChain
from langchain.utilities.dalle_image_generator import DallEAPIWrapper
from langchain.utilities.requests import TextRequestsWrapper
from langchain.utils.requests import TextRequestsWrapper
from langchain.tools.arxiv.tool import ArxivQueryRun
from langchain.tools.golden_query.tool import GoldenQueryRun
from langchain.tools.pubmed.tool import PubmedQueryRun

View File

@@ -11,7 +11,7 @@ from langchain.agents.tools import Tool
from langchain.agents.types import AGENT_TO_CLASS
from langchain.chains.loading import load_chain, load_chain_from_config
from langchain.schema.language_model import BaseLanguageModel
from langchain.utilities.loading import try_load_from_hub
from langchain.utils.loading import try_load_from_hub
logger = logging.getLogger(__file__)

View File

@@ -9,9 +9,9 @@ from langchain.agents.react.wiki_prompt import WIKI_PROMPT
from langchain.agents.tools import Tool
from langchain.agents.utils import validate_tools_single_input
from langchain.docstore.base import Docstore
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Field
from langchain.schema import BasePromptTemplate
from langchain.schema.document import Document
from langchain.schema.language_model import BaseLanguageModel
from langchain.tools.base import BaseTool

View File

@@ -50,13 +50,12 @@ try:
except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from langchain.embeddings.base import Embeddings
from langchain.llms.base import LLM, get_prompts
from langchain.load.dump import dumps
from langchain.load.load import loads
from langchain.schema import ChatGeneration, Generation
from langchain.schema.cache import RETURN_VAL_TYPE, BaseCache
from langchain.schema.embeddings import Embeddings
from langchain.utils import get_from_env
from langchain.vectorstores.redis import Redis as RedisVectorstore

View File

@@ -13,7 +13,7 @@ from langchain.chains.llm import LLMChain
from langchain.pydantic_v1 import Field, root_validator
from langchain.schema import BasePromptTemplate
from langchain.schema.language_model import BaseLanguageModel
from langchain.utilities.requests import TextRequestsWrapper
from langchain.utils.requests import TextRequestsWrapper
class APIChain(Chain):

View File

@@ -14,7 +14,7 @@ from langchain.chains.llm import LLMChain
from langchain.pydantic_v1 import BaseModel, Field
from langchain.schema.language_model import BaseLanguageModel
from langchain.tools.openapi.utils.api_models import APIOperation
from langchain.utilities.requests import Requests
from langchain.utils.requests import Requests
class _ParamMapping(NamedTuple):

View File

@@ -8,8 +8,8 @@ from langchain.callbacks.manager import (
CallbackManagerForChainRun,
)
from langchain.chains.base import Chain
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Field
from langchain.schema.document import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter, TextSplitter

View File

@@ -8,8 +8,8 @@ from langchain.callbacks.manager import Callbacks
from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
from langchain.chains.combine_documents.reduce import ReduceDocumentsChain
from langchain.chains.llm import LLMChain
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Extra, root_validator
from langchain.schema.document import Document
class MapReduceDocumentsChain(BaseCombineDocumentsChain):

View File

@@ -7,9 +7,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
from langchain.callbacks.manager import Callbacks
from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
from langchain.chains.llm import LLMChain
from langchain.docstore.document import Document
from langchain.output_parsers.regex import RegexParser
from langchain.pydantic_v1 import Extra, root_validator
from langchain.schema.document import Document
class MapRerankDocumentsChain(BaseCombineDocumentsChain):

View File

@@ -6,8 +6,8 @@ from typing import Any, Callable, List, Optional, Protocol, Tuple
from langchain.callbacks.manager import Callbacks
from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Extra
from langchain.schema.document import Document
class CombineDocsProtocol(Protocol):

View File

@@ -9,10 +9,10 @@ from langchain.chains.combine_documents.base import (
BaseCombineDocumentsChain,
)
from langchain.chains.llm import LLMChain
from langchain.docstore.document import Document
from langchain.prompts.prompt import PromptTemplate
from langchain.pydantic_v1 import Extra, Field, root_validator
from langchain.schema import BasePromptTemplate, format_document
from langchain.schema.document import Document
def _get_default_document_prompt() -> PromptTemplate:

View File

@@ -7,10 +7,10 @@ from langchain.chains.combine_documents.base import (
BaseCombineDocumentsChain,
)
from langchain.chains.llm import LLMChain
from langchain.docstore.document import Document
from langchain.prompts.prompt import PromptTemplate
from langchain.pydantic_v1 import Extra, Field, root_validator
from langchain.schema import BasePromptTemplate, format_document
from langchain.schema.document import Document
def _get_default_document_prompt() -> PromptTemplate:

View File

@@ -12,8 +12,8 @@ from langchain.callbacks.manager import CallbackManagerForChainRun
from langchain.chains.base import Chain
from langchain.chains.hyde.prompts import PROMPT_MAP
from langchain.chains.llm import LLMChain
from langchain.embeddings.base import Embeddings
from langchain.pydantic_v1 import Extra
from langchain.schema.embeddings import Embeddings
from langchain.schema.language_model import BaseLanguageModel

View File

@@ -7,7 +7,7 @@ from langchain.callbacks.manager import CallbackManagerForChainRun
from langchain.chains import LLMChain
from langchain.chains.base import Chain
from langchain.pydantic_v1 import Extra, Field, root_validator
from langchain.utilities.requests import TextRequestsWrapper
from langchain.utils.requests import TextRequestsWrapper
DEFAULT_HEADERS = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36" # noqa: E501

View File

@@ -29,7 +29,7 @@ from langchain.prompts.loading import (
load_prompt,
load_prompt_from_config,
)
from langchain.utilities.loading import try_load_from_hub
from langchain.utils.loading import try_load_from_hub
URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/chains/"

View File

@@ -14,9 +14,9 @@ from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
from langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain
from langchain.chains.combine_documents.stuff import StuffDocumentsChain
from langchain.chains.llm import LLMChain
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Extra
from langchain.schema import BasePromptTemplate
from langchain.schema.document import Document
from langchain.schema.language_model import BaseLanguageModel
from langchain.text_splitter import TextSplitter

View File

@@ -23,9 +23,9 @@ from langchain.chains.qa_with_sources.map_reduce_prompt import (
EXAMPLE_PROMPT,
QUESTION_PROMPT,
)
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Extra, root_validator
from langchain.schema import BasePromptTemplate
from langchain.schema.document import Document
from langchain.schema.language_model import BaseLanguageModel

View File

@@ -8,9 +8,9 @@ from langchain.callbacks.manager import (
)
from langchain.chains.combine_documents.stuff import StuffDocumentsChain
from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Field
from langchain.schema import BaseRetriever
from langchain.schema.document import Document
class RetrievalQAWithSourcesChain(BaseQAWithSourcesChain):

View File

@@ -9,8 +9,8 @@ from langchain.callbacks.manager import (
)
from langchain.chains.combine_documents.stuff import StuffDocumentsChain
from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain
from langchain.docstore.document import Document
from langchain.pydantic_v1 import Field, root_validator
from langchain.schema.document import Document
from langchain.vectorstores.base import VectorStore

View File

@@ -4,9 +4,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Type
from langchain.callbacks.manager import CallbackManagerForChainRun
from langchain.chains.router.base import RouterChain
from langchain.docstore.document import Document
from langchain.embeddings.base import Embeddings
from langchain.pydantic_v1 import Extra
from langchain.schema.document import Document
from langchain.schema.embeddings import Embeddings
from langchain.vectorstores.base import VectorStore

View File

@@ -16,7 +16,7 @@ from langchain.schema.messages import (
HumanMessage,
SystemMessage,
)
from langchain.utilities.vertexai import raise_vertex_import_error
from langchain.utils.vertexai import raise_vertex_import_error
if TYPE_CHECKING:
from vertexai.language_models import ChatMessage, InputOutputTextPair

View File

@@ -2,7 +2,7 @@
from abc import ABC, abstractmethod
from typing import Dict, List, Union
from langchain.docstore.document import Document
from langchain.schema.document import Document
class Docstore(ABC):

View File

@@ -2,7 +2,7 @@
from typing import Dict, List, Optional, Union
from langchain.docstore.base import AddableMixin, Docstore
from langchain.docstore.document import Document
from langchain.schema.document import Document
class InMemoryDocstore(Docstore, AddableMixin):

View File

@@ -4,7 +4,7 @@
from typing import Union
from langchain.docstore.base import Docstore
from langchain.docstore.document import Document
from langchain.schema.document import Document
class Wikipedia(Docstore):

View File

@@ -2,8 +2,8 @@ import re
from pathlib import Path
from typing import Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class AcreomLoader(BaseLoader):

View File

@@ -1,7 +1,7 @@
from typing import Any, Callable, Iterator, List, Mapping, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utils.utils import guard_import
RecordHandler = Callable[[Any, Optional[str]], Document]

View File

@@ -1,8 +1,8 @@
import json
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utils import stringify_dict

View File

@@ -1,7 +1,7 @@
from typing import Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class AirtableLoader(BaseLoader):

View File

@@ -1,8 +1,8 @@
from typing import Any, Callable, Dict, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.pydantic_v1 import BaseModel, root_validator
from langchain.schema.document import Document
class ApifyDatasetLoader(BaseLoader, BaseModel):

View File

@@ -8,8 +8,8 @@ import warnings
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
if TYPE_CHECKING:
import arcgis

View File

@@ -1,7 +1,7 @@
from typing import List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utilities.arxiv import ArxivAPIWrapper

View File

@@ -3,8 +3,8 @@ from __future__ import annotations
from enum import Enum
from typing import TYPE_CHECKING, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
if TYPE_CHECKING:
import assemblyai

View File

@@ -7,8 +7,8 @@ from typing import Any, Dict, Iterator, List, Optional, Union, cast
import aiohttp
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.web_base import WebBaseLoader
from langchain.schema.document import Document
class AZLyricsLoader(WebBaseLoader):

View File

@@ -1,10 +1,10 @@
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.azure_blob_storage_file import (
AzureBlobStorageFileLoader,
)
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class AzureBlobStorageContainerLoader(BaseLoader):

View File

@@ -2,9 +2,9 @@ import os
import tempfile
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.unstructured import UnstructuredFileLoader
from langchain.schema.document import Document
class AzureBlobStorageFileLoader(BaseLoader):

View File

@@ -3,8 +3,8 @@ import re
from pathlib import Path
from typing import Any, Iterator, List, Mapping, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utilities.bibtex import BibtexparserWrapper
logger = logging.getLogger(__name__)

View File

@@ -2,8 +2,8 @@ from __future__ import annotations
from typing import TYPE_CHECKING, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
if TYPE_CHECKING:
from google.auth.credentials import Credentials

View File

@@ -5,8 +5,8 @@ from typing import List, Tuple
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class BiliBiliLoader(BaseLoader):

View File

@@ -4,10 +4,10 @@ from pathlib import Path
from typing import Any, List, Optional, Tuple
from urllib.parse import unquote
from langchain.docstore.document import Document
from langchain.document_loaders.directory import DirectoryLoader
from langchain.document_loaders.pdf import PyPDFLoader
from langchain.document_loaders.web_base import WebBaseLoader
from langchain.schema.document import Document
class BlackboardLoader(WebBaseLoader):

View File

@@ -6,8 +6,8 @@ from typing import List, Optional
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class BlockchainType(Enum):

View File

@@ -1,7 +1,7 @@
from typing import Iterator, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utilities.brave_search import BraveSearchWrapper

View File

@@ -2,8 +2,8 @@ from typing import Iterator, List, Union
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class BrowserlessLoader(BaseLoader):

View File

@@ -2,8 +2,8 @@ import datetime
import json
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
def concatenate_rows(message: dict, title: str) -> str:

View File

@@ -2,8 +2,8 @@ import asyncio
import logging
from typing import Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.web_base import WebBaseLoader
from langchain.schema.document import Document
class CollegeConfidentialLoader(WebBaseLoader):

View File

@@ -11,8 +11,8 @@ from tenacity import (
wait_exponential,
)
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -1,8 +1,8 @@
import csv
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class CoNLLULoader(BaseLoader):

View File

@@ -1,12 +1,12 @@
import csv
from typing import Any, Dict, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.unstructured import (
UnstructuredFileLoader,
validate_unstructured_version,
)
from langchain.schema.document import Document
class CSVLoader(BaseLoader):

View File

@@ -5,8 +5,8 @@ from typing import List
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -1,8 +1,8 @@
from datetime import datetime, timedelta
from typing import List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class DatadogLogsLoader(BaseLoader):

View File

@@ -1,7 +1,7 @@
from typing import Any, Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class BaseDataFrameLoader(BaseLoader):

View File

@@ -3,8 +3,8 @@ from typing import Any, List
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -4,11 +4,11 @@ import random
from pathlib import Path
from typing import Any, List, Optional, Type, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.html_bs import BSHTMLLoader
from langchain.document_loaders.text import TextLoader
from langchain.document_loaders.unstructured import UnstructuredFileLoader
from langchain.schema.document import Document
FILE_LOADER_TYPE = Union[
Type[UnstructuredFileLoader], Type[TextLoader], Type[BSHTMLLoader]

View File

@@ -2,8 +2,8 @@ from __future__ import annotations
from typing import TYPE_CHECKING, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
if TYPE_CHECKING:
import pandas as pd

View File

@@ -7,9 +7,9 @@ from typing import Any, Dict, List, Mapping, Optional, Sequence, Union
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.pydantic_v1 import BaseModel, root_validator
from langchain.schema.document import Document
TD_NAME = "{http://www.w3.org/1999/xhtml}td"
TABLE_NAME = "{http://www.w3.org/1999/xhtml}table"

View File

@@ -11,9 +11,9 @@ import tempfile
from pathlib import Path
from typing import Any, Dict, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.pydantic_v1 import BaseModel, root_validator
from langchain.schema.document import Document
class DropboxLoader(BaseLoader, BaseModel):

View File

@@ -1,7 +1,7 @@
from typing import Dict, List, Optional, cast
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class DuckDBLoader(BaseLoader):

View File

@@ -1,12 +1,12 @@
import os
from typing import Any, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.unstructured import (
UnstructuredFileLoader,
satisfies_min_unstructured_version,
)
from langchain.schema.document import Document
class UnstructuredEmailLoader(UnstructuredFileLoader):

View File

@@ -5,10 +5,10 @@ from typing import Any, Dict, Iterator, List, Optional
import requests
from typing_extensions import NotRequired, TypedDict
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseBlobParser, BaseLoader
from langchain.document_loaders.blob_loaders import Blob
from langchain.pydantic_v1 import BaseModel, root_validator, validator
from langchain.schema.document import Document
from langchain.text_splitter import TextSplitter
from langchain.utils import get_from_dict_or_env

View File

@@ -4,8 +4,8 @@ from typing import Iterator, List
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class EtherscanLoader(BaseLoader):

View File

@@ -8,8 +8,8 @@ from base64 import b64decode
from time import strptime
from typing import Any, Dict, Iterator, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -3,8 +3,8 @@ import json
from pathlib import Path
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
def concatenate_rows(row: dict) -> str:

View File

@@ -1,7 +1,7 @@
from typing import Iterator, List, Optional, Sequence
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class FaunaLoader(BaseLoader):

View File

@@ -2,8 +2,8 @@ import json
import urllib.request
from typing import Any, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utils import stringify_dict

View File

@@ -1,8 +1,8 @@
from typing import Callable, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.gcs_file import GCSFileLoader
from langchain.schema.document import Document
class GCSDirectoryLoader(BaseLoader):

View File

@@ -2,9 +2,9 @@ import os
import tempfile
from typing import Callable, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.unstructured import UnstructuredFileLoader
from langchain.schema.document import Document
class GCSFileLoader(BaseLoader):

View File

@@ -1,7 +1,7 @@
from typing import Any, Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class GeoDataFrameLoader(BaseLoader):

View File

@@ -1,8 +1,8 @@
import os
from typing import Callable, List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class GitLoader(BaseLoader):

View File

@@ -1,8 +1,8 @@
from typing import Any, List, Optional
from urllib.parse import urljoin, urlparse
from langchain.docstore.document import Document
from langchain.document_loaders.web_base import WebBaseLoader
from langchain.schema.document import Document
class GitbookLoader(WebBaseLoader):

View File

@@ -4,9 +4,9 @@ from typing import Dict, Iterator, List, Literal, Optional, Union
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.pydantic_v1 import BaseModel, root_validator, validator
from langchain.schema.document import Document
from langchain.utils import get_from_dict_or_env

View File

@@ -11,9 +11,9 @@ import os
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.pydantic_v1 import BaseModel, root_validator, validator
from langchain.schema.document import Document
SCOPES = ["https://www.googleapis.com/auth/drive.readonly"]

View File

@@ -1,7 +1,7 @@
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class GutenbergLoader(BaseLoader):

View File

@@ -1,7 +1,7 @@
from typing import Any, List
from langchain.docstore.document import Document
from langchain.document_loaders.web_base import WebBaseLoader
from langchain.schema.document import Document
class HNLoader(WebBaseLoader):

View File

@@ -1,8 +1,8 @@
import logging
from typing import Dict, List, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
from typing import Iterator, List, Mapping, Optional, Sequence, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class HuggingFaceDatasetLoader(BaseLoader):

View File

@@ -2,9 +2,9 @@ from typing import List, Optional
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.web_base import WebBaseLoader
from langchain.schema.document import Document
IFIXIT_BASE_URL = "https://www.ifixit.com/api/2.0"

View File

@@ -2,8 +2,8 @@ from typing import Any, List, Tuple, Union
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class ImageCaptionLoader(BaseLoader):

View File

@@ -1,7 +1,7 @@
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.web_base import WebBaseLoader
from langchain.schema.document import Document
class IMSDbLoader(WebBaseLoader):

View File

@@ -2,8 +2,8 @@ import json
import urllib.request
from typing import List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utils import get_from_env, stringify_dict
IUGU_ENDPOINTS = {

View File

@@ -2,8 +2,8 @@ import json
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class JSONLoader(BaseLoader):

View File

@@ -2,8 +2,8 @@ import json
import urllib.request
from typing import Any, Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class LarkSuiteDocLoader(BaseLoader):

View File

@@ -3,8 +3,8 @@ from __future__ import annotations
import os
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Sequence
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
if TYPE_CHECKING:
import mastodon

View File

@@ -2,8 +2,8 @@ from __future__ import annotations
from typing import Any, Iterator, List, Optional, Sequence
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utilities.max_compute import MaxComputeAPIWrapper

View File

@@ -2,8 +2,8 @@ import logging
from pathlib import Path
from typing import List, Optional, Sequence, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
from typing import Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class MergedDataLoader(BaseLoader):

View File

@@ -2,8 +2,8 @@ import email
import logging
from typing import Dict, List, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -3,8 +3,8 @@ import urllib.request
from base64 import b64encode
from typing import List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.utils import get_from_env, stringify_value
MODERN_TREASURY_ENDPOINTS = {

View File

@@ -2,8 +2,8 @@
import logging
from typing import Any, Iterator, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
logger = logging.getLogger(__name__)

View File

@@ -3,8 +3,8 @@ import json
from pathlib import Path
from typing import Any, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
def concatenate_cells(

View File

@@ -1,8 +1,8 @@
from pathlib import Path
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
class NotionDirectoryLoader(BaseLoader):

View File

@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Optional
import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
NOTION_BASE_URL = "https://api.notion.com/v1"
DATABASE_URL = NOTION_BASE_URL + "/databases/{database_id}/query"

View File

@@ -2,8 +2,8 @@ import json
import uuid
from typing import List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.schema.document import Document
from langchain.tools.nuclia.tool import NucliaUnderstandingAPI

View File

@@ -1,9 +1,9 @@
# coding:utf-8
from typing import List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
from langchain.document_loaders.obs_file import OBSFileLoader
from langchain.schema.document import Document
class OBSDirectoryLoader(BaseLoader):

Some files were not shown because too many files have changed in this diff.