Mirror of https://github.com/hwchase17/langchain.git, synced 2026-02-09 02:33:34 +00:00
Compare commits
2 Commits
isaac/tool ... harrison/m

| Author | SHA1 | Date |
|---|---|---|
|  | 4c3326becf |  |
|  | f9db904b12 |  |
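Across the hunks below, every change is an import move: `Document` and `Embeddings` are imported from `langchain.schema.document` and `langchain.schema.embeddings` rather than `langchain.docstore.document` and `langchain.embeddings.base`, the `ChatSession` import is deferred behind `TYPE_CHECKING`, and several helpers move from `langchain.utilities.*` to `langchain.utils.*`. A minimal sketch of the new-style imports, assuming the target branch of this compare actually exposes these module paths:

```python
# Sketch only: the before/after import pattern implied by the hunks below.
# Assumes langchain.schema.document and langchain.schema.embeddings exist on
# the target branch of this compare; the commented-out lines are the old paths.

# from langchain.docstore.document import Document    # old path (removed lines)
# from langchain.embeddings.base import Embeddings    # old path (removed lines)
from langchain.schema.document import Document        # new path (added lines)
from langchain.schema.embeddings import Embeddings    # new path (added lines)

# Document keeps the same interface regardless of which module it is imported from.
doc = Document(page_content="hello", metadata={"source": "example"})
print(doc.page_content)
```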
@@ -6,7 +6,7 @@ from typing import Any, Dict, List, Optional, Union
 from langchain.callbacks.manager import CallbackManagerForChainRun
 from langchain.chains.llm import LLMChain
 from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS
-from langchain.embeddings.base import Embeddings
+from langchain.schema.embeddings import Embeddings
 from langchain.prompts.prompt import PromptTemplate
 from langchain.schema import BaseOutputParser, BasePromptTemplate
 from langchain.schema.language_model import BaseLanguageModel
@@ -2,6 +2,7 @@ from __future__ import annotations

 import importlib
 from typing import (
+    TYPE_CHECKING,
     Any,
     AsyncIterator,
     Dict,
@@ -15,7 +16,6 @@ from typing import (

 from typing_extensions import Literal

-from langchain.chat_loaders.base import ChatSession
 from langchain.schema.messages import (
     AIMessage,
     AIMessageChunk,
@@ -27,6 +27,9 @@ from langchain.schema.messages import (
     SystemMessage,
 )

+if TYPE_CHECKING:
+    from langchain.chat_loaders.base import ChatSession
+

 async def aenumerate(
     iterable: AsyncIterator[Any], start: int = 0
@@ -8,7 +8,7 @@ from langchain.chains.api.openapi.chain import OpenAPIEndpointChain
 from langchain.schema.language_model import BaseLanguageModel
 from langchain.tools.openapi.utils.api_models import APIOperation
 from langchain.tools.openapi.utils.openapi_utils import OpenAPISpec
-from langchain.utilities.requests import Requests
+from langchain.utils.requests import Requests


 class NLATool(Tool):
@@ -9,7 +9,7 @@ from langchain.schema.language_model import BaseLanguageModel
 from langchain.tools.base import BaseTool
 from langchain.tools.openapi.utils.openapi_utils import OpenAPISpec
 from langchain.tools.plugin import AIPlugin
-from langchain.utilities.requests import Requests
+from langchain.utils.requests import Requests


 class NLAToolkit(BaseToolkit):
@@ -37,7 +37,7 @@ from langchain.schema import BasePromptTemplate
 from langchain.schema.language_model import BaseLanguageModel
 from langchain.tools.base import BaseTool
 from langchain.tools.requests.tool import BaseRequestsTool
-from langchain.utilities.requests import RequestsWrapper
+from langchain.utils.requests import RequestsWrapper

 #
 # Requests tools with LLM-instructed extraction of truncated responses.
@@ -19,7 +19,7 @@ from langchain.tools.requests.tool import (
     RequestsPostTool,
     RequestsPutTool,
 )
-from langchain.utilities.requests import TextRequestsWrapper
+from langchain.utils.requests import TextRequestsWrapper


 class RequestsToolkit(BaseToolkit):
@@ -12,7 +12,7 @@ from langchain.chains.api import news_docs, open_meteo_docs, podcast_docs, tmdb_
 from langchain.chains.api.base import APIChain
 from langchain.chains.llm_math.base import LLMMathChain
 from langchain.utilities.dalle_image_generator import DallEAPIWrapper
-from langchain.utilities.requests import TextRequestsWrapper
+from langchain.utils.requests import TextRequestsWrapper
 from langchain.tools.arxiv.tool import ArxivQueryRun
 from langchain.tools.golden_query.tool import GoldenQueryRun
 from langchain.tools.pubmed.tool import PubmedQueryRun
@@ -11,7 +11,7 @@ from langchain.agents.tools import Tool
 from langchain.agents.types import AGENT_TO_CLASS
 from langchain.chains.loading import load_chain, load_chain_from_config
 from langchain.schema.language_model import BaseLanguageModel
-from langchain.utilities.loading import try_load_from_hub
+from langchain.utils.loading import try_load_from_hub

 logger = logging.getLogger(__file__)

@@ -9,9 +9,9 @@ from langchain.agents.react.wiki_prompt import WIKI_PROMPT
 from langchain.agents.tools import Tool
 from langchain.agents.utils import validate_tools_single_input
 from langchain.docstore.base import Docstore
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Field
 from langchain.schema import BasePromptTemplate
+from langchain.schema.document import Document
 from langchain.schema.language_model import BaseLanguageModel
 from langchain.tools.base import BaseTool

@@ -50,13 +50,12 @@ try:
 except ImportError:
     from sqlalchemy.ext.declarative import declarative_base

-
-from langchain.embeddings.base import Embeddings
 from langchain.llms.base import LLM, get_prompts
 from langchain.load.dump import dumps
 from langchain.load.load import loads
 from langchain.schema import ChatGeneration, Generation
 from langchain.schema.cache import RETURN_VAL_TYPE, BaseCache
+from langchain.schema.embeddings import Embeddings
 from langchain.utils import get_from_env
 from langchain.vectorstores.redis import Redis as RedisVectorstore

@@ -13,7 +13,7 @@ from langchain.chains.llm import LLMChain
 from langchain.pydantic_v1 import Field, root_validator
 from langchain.schema import BasePromptTemplate
 from langchain.schema.language_model import BaseLanguageModel
-from langchain.utilities.requests import TextRequestsWrapper
+from langchain.utils.requests import TextRequestsWrapper


 class APIChain(Chain):
@@ -14,7 +14,7 @@ from langchain.chains.llm import LLMChain
 from langchain.pydantic_v1 import BaseModel, Field
 from langchain.schema.language_model import BaseLanguageModel
 from langchain.tools.openapi.utils.api_models import APIOperation
-from langchain.utilities.requests import Requests
+from langchain.utils.requests import Requests


 class _ParamMapping(NamedTuple):
@@ -8,8 +8,8 @@ from langchain.callbacks.manager import (
     CallbackManagerForChainRun,
 )
 from langchain.chains.base import Chain
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Field
+from langchain.schema.document import Document
 from langchain.text_splitter import RecursiveCharacterTextSplitter, TextSplitter


@@ -8,8 +8,8 @@ from langchain.callbacks.manager import Callbacks
 from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
 from langchain.chains.combine_documents.reduce import ReduceDocumentsChain
 from langchain.chains.llm import LLMChain
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Extra, root_validator
+from langchain.schema.document import Document


 class MapReduceDocumentsChain(BaseCombineDocumentsChain):
@@ -7,9 +7,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
 from langchain.callbacks.manager import Callbacks
 from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
 from langchain.chains.llm import LLMChain
-from langchain.docstore.document import Document
 from langchain.output_parsers.regex import RegexParser
 from langchain.pydantic_v1 import Extra, root_validator
+from langchain.schema.document import Document


 class MapRerankDocumentsChain(BaseCombineDocumentsChain):
@@ -6,8 +6,8 @@ from typing import Any, Callable, List, Optional, Protocol, Tuple

 from langchain.callbacks.manager import Callbacks
 from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Extra
+from langchain.schema.document import Document


 class CombineDocsProtocol(Protocol):
@@ -9,10 +9,10 @@ from langchain.chains.combine_documents.base import (
     BaseCombineDocumentsChain,
 )
 from langchain.chains.llm import LLMChain
-from langchain.docstore.document import Document
 from langchain.prompts.prompt import PromptTemplate
 from langchain.pydantic_v1 import Extra, Field, root_validator
 from langchain.schema import BasePromptTemplate, format_document
+from langchain.schema.document import Document


 def _get_default_document_prompt() -> PromptTemplate:
@@ -7,10 +7,10 @@ from langchain.chains.combine_documents.base import (
     BaseCombineDocumentsChain,
 )
 from langchain.chains.llm import LLMChain
-from langchain.docstore.document import Document
 from langchain.prompts.prompt import PromptTemplate
 from langchain.pydantic_v1 import Extra, Field, root_validator
 from langchain.schema import BasePromptTemplate, format_document
+from langchain.schema.document import Document


 def _get_default_document_prompt() -> PromptTemplate:
@@ -12,8 +12,8 @@ from langchain.callbacks.manager import CallbackManagerForChainRun
 from langchain.chains.base import Chain
 from langchain.chains.hyde.prompts import PROMPT_MAP
 from langchain.chains.llm import LLMChain
-from langchain.embeddings.base import Embeddings
 from langchain.pydantic_v1 import Extra
+from langchain.schema.embeddings import Embeddings
 from langchain.schema.language_model import BaseLanguageModel


@@ -7,7 +7,7 @@ from langchain.callbacks.manager import CallbackManagerForChainRun
 from langchain.chains import LLMChain
 from langchain.chains.base import Chain
 from langchain.pydantic_v1 import Extra, Field, root_validator
-from langchain.utilities.requests import TextRequestsWrapper
+from langchain.utils.requests import TextRequestsWrapper

 DEFAULT_HEADERS = {
     "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36"  # noqa: E501
@@ -29,7 +29,7 @@ from langchain.prompts.loading import (
     load_prompt,
     load_prompt_from_config,
 )
-from langchain.utilities.loading import try_load_from_hub
+from langchain.utils.loading import try_load_from_hub

 URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/chains/"

@@ -14,9 +14,9 @@ from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
 from langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain
 from langchain.chains.combine_documents.stuff import StuffDocumentsChain
 from langchain.chains.llm import LLMChain
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Extra
 from langchain.schema import BasePromptTemplate
+from langchain.schema.document import Document
 from langchain.schema.language_model import BaseLanguageModel
 from langchain.text_splitter import TextSplitter

@@ -23,9 +23,9 @@ from langchain.chains.qa_with_sources.map_reduce_prompt import (
     EXAMPLE_PROMPT,
     QUESTION_PROMPT,
 )
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Extra, root_validator
 from langchain.schema import BasePromptTemplate
+from langchain.schema.document import Document
 from langchain.schema.language_model import BaseLanguageModel

@@ -8,9 +8,9 @@ from langchain.callbacks.manager import (
 )
 from langchain.chains.combine_documents.stuff import StuffDocumentsChain
 from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Field
 from langchain.schema import BaseRetriever
+from langchain.schema.document import Document


 class RetrievalQAWithSourcesChain(BaseQAWithSourcesChain):
@@ -9,8 +9,8 @@ from langchain.callbacks.manager import (
 )
 from langchain.chains.combine_documents.stuff import StuffDocumentsChain
 from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain
-from langchain.docstore.document import Document
 from langchain.pydantic_v1 import Field, root_validator
+from langchain.schema.document import Document
 from langchain.vectorstores.base import VectorStore


@@ -4,9 +4,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Type

 from langchain.callbacks.manager import CallbackManagerForChainRun
 from langchain.chains.router.base import RouterChain
-from langchain.docstore.document import Document
-from langchain.embeddings.base import Embeddings
 from langchain.pydantic_v1 import Extra
+from langchain.schema.document import Document
+from langchain.schema.embeddings import Embeddings
 from langchain.vectorstores.base import VectorStore


@@ -16,7 +16,7 @@ from langchain.schema.messages import (
     HumanMessage,
     SystemMessage,
 )
-from langchain.utilities.vertexai import raise_vertex_import_error
+from langchain.utils.vertexai import raise_vertex_import_error

 if TYPE_CHECKING:
     from vertexai.language_models import ChatMessage, InputOutputTextPair
@@ -2,7 +2,7 @@
 from abc import ABC, abstractmethod
 from typing import Dict, List, Union

-from langchain.docstore.document import Document
+from langchain.schema.document import Document


 class Docstore(ABC):
@@ -2,7 +2,7 @@
 from typing import Dict, List, Optional, Union

 from langchain.docstore.base import AddableMixin, Docstore
-from langchain.docstore.document import Document
+from langchain.schema.document import Document


 class InMemoryDocstore(Docstore, AddableMixin):
@@ -4,7 +4,7 @@
 from typing import Union

 from langchain.docstore.base import Docstore
-from langchain.docstore.document import Document
+from langchain.schema.document import Document


 class Wikipedia(Docstore):
@@ -2,8 +2,8 @@ import re
 from pathlib import Path
 from typing import Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class AcreomLoader(BaseLoader):
@@ -1,7 +1,7 @@
 from typing import Any, Callable, Iterator, List, Mapping, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utils.utils import guard_import

 RecordHandler = Callable[[Any, Optional[str]], Document]
@@ -1,8 +1,8 @@
 import json
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utils import stringify_dict


@@ -1,7 +1,7 @@
 from typing import Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class AirtableLoader(BaseLoader):
@@ -1,8 +1,8 @@
 from typing import Any, Callable, Dict, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.pydantic_v1 import BaseModel, root_validator
+from langchain.schema.document import Document


 class ApifyDatasetLoader(BaseLoader, BaseModel):
@@ -8,8 +8,8 @@ import warnings
 from datetime import datetime, timezone
 from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 if TYPE_CHECKING:
     import arcgis
@@ -1,7 +1,7 @@
 from typing import List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utilities.arxiv import ArxivAPIWrapper


@@ -3,8 +3,8 @@ from __future__ import annotations
 from enum import Enum
 from typing import TYPE_CHECKING, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 if TYPE_CHECKING:
     import assemblyai
@@ -7,8 +7,8 @@ from typing import Any, Dict, Iterator, List, Optional, Union, cast
 import aiohttp
 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -1,7 +1,7 @@
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.web_base import WebBaseLoader
+from langchain.schema.document import Document


 class AZLyricsLoader(WebBaseLoader):
@@ -1,10 +1,10 @@
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.azure_blob_storage_file import (
     AzureBlobStorageFileLoader,
 )
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class AzureBlobStorageContainerLoader(BaseLoader):
@@ -2,9 +2,9 @@ import os
 import tempfile
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.unstructured import UnstructuredFileLoader
+from langchain.schema.document import Document


 class AzureBlobStorageFileLoader(BaseLoader):
@@ -3,8 +3,8 @@ import re
 from pathlib import Path
 from typing import Any, Iterator, List, Mapping, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utilities.bibtex import BibtexparserWrapper

 logger = logging.getLogger(__name__)
@@ -2,8 +2,8 @@ from __future__ import annotations

 from typing import TYPE_CHECKING, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 if TYPE_CHECKING:
     from google.auth.credentials import Credentials
@@ -5,8 +5,8 @@ from typing import List, Tuple

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class BiliBiliLoader(BaseLoader):
@@ -4,10 +4,10 @@ from pathlib import Path
 from typing import Any, List, Optional, Tuple
 from urllib.parse import unquote

-from langchain.docstore.document import Document
 from langchain.document_loaders.directory import DirectoryLoader
 from langchain.document_loaders.pdf import PyPDFLoader
 from langchain.document_loaders.web_base import WebBaseLoader
+from langchain.schema.document import Document


 class BlackboardLoader(WebBaseLoader):
@@ -6,8 +6,8 @@ from typing import List, Optional

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class BlockchainType(Enum):
@@ -1,7 +1,7 @@
 from typing import Iterator, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utilities.brave_search import BraveSearchWrapper


@@ -2,8 +2,8 @@ from typing import Iterator, List, Union

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class BrowserlessLoader(BaseLoader):
@@ -2,8 +2,8 @@ import datetime
 import json
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 def concatenate_rows(message: dict, title: str) -> str:
@@ -2,8 +2,8 @@ import asyncio
 import logging
 from typing import Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -1,7 +1,7 @@
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.web_base import WebBaseLoader
+from langchain.schema.document import Document


 class CollegeConfidentialLoader(WebBaseLoader):
@@ -11,8 +11,8 @@ from tenacity import (
     wait_exponential,
 )

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -1,8 +1,8 @@
 import csv
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class CoNLLULoader(BaseLoader):
@@ -1,12 +1,12 @@
 import csv
 from typing import Any, Dict, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.unstructured import (
     UnstructuredFileLoader,
     validate_unstructured_version,
 )
+from langchain.schema.document import Document


 class CSVLoader(BaseLoader):
@@ -5,8 +5,8 @@ from typing import List

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -1,8 +1,8 @@
 from datetime import datetime, timedelta
 from typing import List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class DatadogLogsLoader(BaseLoader):
@@ -1,7 +1,7 @@
 from typing import Any, Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class BaseDataFrameLoader(BaseLoader):
@@ -3,8 +3,8 @@ from typing import Any, List

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -4,11 +4,11 @@ import random
 from pathlib import Path
 from typing import Any, List, Optional, Type, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.html_bs import BSHTMLLoader
 from langchain.document_loaders.text import TextLoader
 from langchain.document_loaders.unstructured import UnstructuredFileLoader
+from langchain.schema.document import Document

 FILE_LOADER_TYPE = Union[
     Type[UnstructuredFileLoader], Type[TextLoader], Type[BSHTMLLoader]
@@ -2,8 +2,8 @@ from __future__ import annotations

 from typing import TYPE_CHECKING, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 if TYPE_CHECKING:
     import pandas as pd
@@ -7,9 +7,9 @@ from typing import Any, Dict, List, Mapping, Optional, Sequence, Union

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.pydantic_v1 import BaseModel, root_validator
+from langchain.schema.document import Document

 TD_NAME = "{http://www.w3.org/1999/xhtml}td"
 TABLE_NAME = "{http://www.w3.org/1999/xhtml}table"
@@ -11,9 +11,9 @@ import tempfile
 from pathlib import Path
 from typing import Any, Dict, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.pydantic_v1 import BaseModel, root_validator
+from langchain.schema.document import Document


 class DropboxLoader(BaseLoader, BaseModel):
@@ -1,7 +1,7 @@
 from typing import Dict, List, Optional, cast

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class DuckDBLoader(BaseLoader):
@@ -1,12 +1,12 @@
 import os
 from typing import Any, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.unstructured import (
     UnstructuredFileLoader,
     satisfies_min_unstructured_version,
 )
+from langchain.schema.document import Document


 class UnstructuredEmailLoader(UnstructuredFileLoader):
@@ -5,10 +5,10 @@ from typing import Any, Dict, Iterator, List, Optional
 import requests
 from typing_extensions import NotRequired, TypedDict

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseBlobParser, BaseLoader
 from langchain.document_loaders.blob_loaders import Blob
 from langchain.pydantic_v1 import BaseModel, root_validator, validator
+from langchain.schema.document import Document
 from langchain.text_splitter import TextSplitter
 from langchain.utils import get_from_dict_or_env

@@ -4,8 +4,8 @@ from typing import Iterator, List

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class EtherscanLoader(BaseLoader):
@@ -8,8 +8,8 @@ from base64 import b64decode
 from time import strptime
 from typing import Any, Dict, Iterator, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -3,8 +3,8 @@ import json
 from pathlib import Path
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 def concatenate_rows(row: dict) -> str:
@@ -1,7 +1,7 @@
 from typing import Iterator, List, Optional, Sequence

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class FaunaLoader(BaseLoader):
@@ -2,8 +2,8 @@ import json
 import urllib.request
 from typing import Any, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utils import stringify_dict


@@ -1,8 +1,8 @@
 from typing import Callable, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.gcs_file import GCSFileLoader
+from langchain.schema.document import Document


 class GCSDirectoryLoader(BaseLoader):
@@ -2,9 +2,9 @@ import os
 import tempfile
 from typing import Callable, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.unstructured import UnstructuredFileLoader
+from langchain.schema.document import Document


 class GCSFileLoader(BaseLoader):
@@ -1,7 +1,7 @@
 from typing import Any, Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class GeoDataFrameLoader(BaseLoader):
@@ -1,8 +1,8 @@
 import os
 from typing import Callable, List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class GitLoader(BaseLoader):
@@ -1,8 +1,8 @@
 from typing import Any, List, Optional
 from urllib.parse import urljoin, urlparse

-from langchain.docstore.document import Document
 from langchain.document_loaders.web_base import WebBaseLoader
+from langchain.schema.document import Document


 class GitbookLoader(WebBaseLoader):
@@ -4,9 +4,9 @@ from typing import Dict, Iterator, List, Literal, Optional, Union

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.pydantic_v1 import BaseModel, root_validator, validator
+from langchain.schema.document import Document
 from langchain.utils import get_from_dict_or_env


@@ -11,9 +11,9 @@ import os
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Sequence, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.pydantic_v1 import BaseModel, root_validator, validator
+from langchain.schema.document import Document

 SCOPES = ["https://www.googleapis.com/auth/drive.readonly"]

@@ -1,7 +1,7 @@
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class GutenbergLoader(BaseLoader):
@@ -1,7 +1,7 @@
 from typing import Any, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.web_base import WebBaseLoader
+from langchain.schema.document import Document


 class HNLoader(WebBaseLoader):
@@ -1,8 +1,8 @@
 import logging
 from typing import Dict, List, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -1,7 +1,7 @@
 from typing import Iterator, List, Mapping, Optional, Sequence, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class HuggingFaceDatasetLoader(BaseLoader):
@@ -2,9 +2,9 @@ from typing import List, Optional

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.web_base import WebBaseLoader
+from langchain.schema.document import Document

 IFIXIT_BASE_URL = "https://www.ifixit.com/api/2.0"

@@ -2,8 +2,8 @@ from typing import Any, List, Tuple, Union

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class ImageCaptionLoader(BaseLoader):
@@ -1,7 +1,7 @@
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.web_base import WebBaseLoader
+from langchain.schema.document import Document


 class IMSDbLoader(WebBaseLoader):
@@ -2,8 +2,8 @@ import json
 import urllib.request
 from typing import List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utils import get_from_env, stringify_dict

 IUGU_ENDPOINTS = {
@@ -2,8 +2,8 @@ import json
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class JSONLoader(BaseLoader):
@@ -2,8 +2,8 @@ import json
 import urllib.request
 from typing import Any, Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class LarkSuiteDocLoader(BaseLoader):
@@ -3,8 +3,8 @@ from __future__ import annotations
 import os
 from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Sequence

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 if TYPE_CHECKING:
     import mastodon
@@ -2,8 +2,8 @@ from __future__ import annotations

 from typing import Any, Iterator, List, Optional, Sequence

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utilities.max_compute import MaxComputeAPIWrapper


@@ -2,8 +2,8 @@ import logging
 from pathlib import Path
 from typing import List, Optional, Sequence, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -1,7 +1,7 @@
 from typing import Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class MergedDataLoader(BaseLoader):
@@ -2,8 +2,8 @@ import email
 import logging
 from typing import Dict, List, Union

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -3,8 +3,8 @@ import urllib.request
 from base64 import b64encode
 from typing import List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.utils import get_from_env, stringify_value

 MODERN_TREASURY_ENDPOINTS = {
@@ -2,8 +2,8 @@
 import logging
 from typing import Any, Iterator, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 logger = logging.getLogger(__name__)

@@ -3,8 +3,8 @@ import json
 from pathlib import Path
 from typing import Any, List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 def concatenate_cells(
@@ -1,8 +1,8 @@
 from pathlib import Path
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document


 class NotionDirectoryLoader(BaseLoader):
@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Optional

 import requests

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document

 NOTION_BASE_URL = "https://api.notion.com/v1"
 DATABASE_URL = NOTION_BASE_URL + "/databases/{database_id}/query"
@@ -2,8 +2,8 @@ import json
 import uuid
 from typing import List

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
+from langchain.schema.document import Document
 from langchain.tools.nuclia.tool import NucliaUnderstandingAPI


@@ -1,9 +1,9 @@
 # coding:utf-8
 from typing import List, Optional

-from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 from langchain.document_loaders.obs_file import OBSFileLoader
+from langchain.schema.document import Document


 class OBSDirectoryLoader(BaseLoader):
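The non-schema hunks above follow the same pattern for helpers: `Requests`, `RequestsWrapper`, `TextRequestsWrapper`, `try_load_from_hub`, and `raise_vertex_import_error` are imported from `langchain.utils.requests`, `langchain.utils.loading`, and `langchain.utils.vertexai` instead of the corresponding `langchain.utilities` modules. A minimal usage sketch, assuming `langchain.utils.requests` exists on the target branch of this compare (the commented-out line is the old path):

```python
# Sketch only: the utilities -> utils import move shown in the hunks above.
# Assumes langchain.utils.requests exists on the target branch; otherwise the
# commented-out old path is the one that resolves.

# from langchain.utilities.requests import TextRequestsWrapper  # old path (removed)
from langchain.utils.requests import TextRequestsWrapper  # new path (added)

# The wrapper itself behaves the same either way.
wrapper = TextRequestsWrapper(headers={"User-Agent": "example-agent"})
# wrapper.get("https://example.com") would issue a GET and return the response body as text.
```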
Some files were not shown because too many files have changed in this diff.