langchain[patch]: Update more code to use langchain community as an optional dependency (#21170)
Updates more code so that langchain-community is treated as an optional dependency.
Commit: c306364b06
Parent: cd4c54282a
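The diffs below lean on two recurring patterns for making langchain-community optional: chains such as APIChain and the load_llm helpers wrap the community import in try/except ImportError and fall back to a stub that raises an informative error, while the thin re-export modules move the import behind if TYPE_CHECKING: and resolve names lazily through langchain._api.create_importer. The sketch below is a simplified stand-in for that second pattern, not the actual create_importer helper (which also consolidates deprecation warnings); the module and symbol names are borrowed from one of the shims in this commit.

# Simplified sketch of the lazy re-export pattern (PEP 562 module __getattr__).
# This approximates, but is not, langchain._api.create_importer.
from importlib import import_module
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Only static type checkers follow this import; it never runs at runtime.
    from langchain_community.utilities.python import PythonREPL

# Maps each re-exported name to the langchain_community module that provides it.
DEPRECATED_LOOKUP = {"PythonREPL": "langchain_community.utilities.python"}


def __getattr__(name: str) -> Any:
    """Resolve re-exported names lazily.

    Raises a clear ImportError if the optional langchain-community package
    is not installed.
    """
    if name not in DEPRECATED_LOOKUP:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    try:
        module = import_module(DEPRECATED_LOOKUP[name])
    except ImportError as e:
        raise ImportError(
            f"{name} requires the langchain-community package. "
            "Install it with `pip install langchain-community`."
        ) from e
    return getattr(module, name)


__all__ = ["PythonREPL"]
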
@@ -1,7 +1,8 @@
 """Chain that implements the ReAct paper from https://arxiv.org/pdf/2210.03629.pdf."""
-from typing import Any, List, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, List, Optional, Sequence
 
-from langchain_community.docstore.base import Docstore
 from langchain_core._api import deprecated
 from langchain_core.documents import Document
 from langchain_core.language_models import BaseLanguageModel
@@ -16,6 +17,9 @@ from langchain.agents.react.textworld_prompt import TEXTWORLD_PROMPT
 from langchain.agents.react.wiki_prompt import WIKI_PROMPT
 from langchain.agents.utils import validate_tools_single_input
 
+if TYPE_CHECKING:
+    from langchain_community.docstore.base import Docstore
+
 
 @deprecated("0.1.0", removal="0.2.0")
 class ReActDocstoreAgent(Agent):
@@ -1,9 +1,8 @@
 """Chain that does self-ask with search."""
-from typing import Any, Sequence, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence, Union
 
-from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper
-from langchain_community.utilities.searchapi import SearchApiAPIWrapper
-from langchain_community.utilities.serpapi import SerpAPIWrapper
 from langchain_core._api import deprecated
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.prompts import BasePromptTemplate
@@ -18,6 +17,11 @@ from langchain.agents.self_ask_with_search.output_parser import SelfAskOutputPar
 from langchain.agents.self_ask_with_search.prompt import PROMPT
 from langchain.agents.utils import validate_tools_single_input
 
+if TYPE_CHECKING:
+    from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper
+    from langchain_community.utilities.searchapi import SearchApiAPIWrapper
+    from langchain_community.utilities.serpapi import SerpAPIWrapper
+
 
 @deprecated("0.1.0", alternative="create_self_ask_with_search", removal="0.2.0")
 class SelfAskWithSearchAgent(Agent):
@@ -4,7 +4,6 @@ from __future__ import annotations
 from typing import Any, Dict, List, Optional, Sequence, Tuple
 from urllib.parse import urlparse
 
-from langchain_community.utilities.requests import TextRequestsWrapper
 from langchain_core.callbacks import (
     AsyncCallbackManagerForChainRun,
     CallbackManagerForChainRun,
@@ -50,6 +49,9 @@ def _check_in_allowed_domain(url: str, limit_to_domains: Sequence[str]) -> bool:
     return False
 
 
+try:
+    from langchain_community.utilities.requests import TextRequestsWrapper
+
 class APIChain(Chain):
     """Chain that makes API calls and summarizes the responses to answer a question.
 
@@ -123,7 +125,10 @@ class APIChain(Chain):
                 "You must specify a list of domains to limit access using "
                 "`limit_to_domains`"
             )
-        if not values["limit_to_domains"] and values["limit_to_domains"] is not None:
+        if (
+            not values["limit_to_domains"]
+            and values["limit_to_domains"] is not None
+        ):
             raise ValueError(
                 "Please provide a list of domains to limit access using "
                 "`limit_to_domains`."
@@ -179,7 +184,9 @@ class APIChain(Chain):
         inputs: Dict[str, Any],
         run_manager: Optional[AsyncCallbackManagerForChainRun] = None,
     ) -> Dict[str, str]:
-        _run_manager = run_manager or AsyncCallbackManagerForChainRun.get_noop_manager()
+        _run_manager = (
+            run_manager or AsyncCallbackManagerForChainRun.get_noop_manager()
+        )
         question = inputs[self.question_key]
         api_url = await self.api_request_chain.apredict(
             question=question,
@@ -236,3 +243,11 @@ class APIChain(Chain):
     @property
     def _chain_type(self) -> str:
         return "api_chain"
+except ImportError:
+
+    class APIChain:  # type: ignore[no-redef]
+        def __init__(self, *args: Any, **kwargs: Any) -> None:
+            raise ImportError(
+                "To use the APIChain, you must install the langchain_community package."
+                "pip install langchain_community"
+            )
@@ -5,7 +5,6 @@ from pathlib import Path
 from typing import Any, Union
 
 import yaml
-from langchain_community.llms.loading import load_llm, load_llm_from_config
 from langchain_core.prompts.loading import (
     _load_output_parser,
     load_prompt,
@@ -30,6 +29,27 @@ from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesCha
 from langchain.chains.qa_with_sources.vector_db import VectorDBQAWithSourcesChain
 from langchain.chains.retrieval_qa.base import RetrievalQA, VectorDBQA
 
+try:
+    from langchain_community.llms.loading import load_llm, load_llm_from_config
+except ImportError:
+
+    def load_llm(*args: Any, **kwargs: Any) -> None:  # type: ignore
+        raise ImportError(
+            "To use this load_llm functionality you must install the "
+            "langchain_community package. "
+            "You can install it with `pip install langchain_community`"
+        )
+
+    def load_llm_from_config(  # type: ignore
+        *args: Any, **kwargs: Any
+    ) -> None:
+        raise ImportError(
+            "To use this load_llm_from_config functionality you must install the "
+            "langchain_community package. "
+            "You can install it with `pip install langchain_community`"
+        )
+
+
 URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/chains/"
 
 
@@ -6,7 +6,6 @@ from collections import defaultdict
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
 
 import requests
-from langchain_community.utilities.openapi import OpenAPISpec
 from langchain_core.callbacks import CallbackManagerForChainRun
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.output_parsers.openai_functions import JsonOutputFunctionsParser
@@ -20,6 +19,7 @@ from langchain.chains.sequential import SequentialChain
 from langchain.tools import APIOperation
 
 if TYPE_CHECKING:
+    from langchain_community.utilities.openapi import OpenAPISpec
     from openapi_pydantic import Parameter
 
 
@@ -255,6 +255,13 @@ def get_openapi_chain(
         prompt: Main prompt template to use.
         request_chain: Chain for taking the functions output and executing the request.
     """
+    try:
+        from langchain_community.utilities.openapi import OpenAPISpec
+    except ImportError as e:
+        raise ImportError(
+            "Could not import langchain_community.utilities.openapi. "
+            "Please install it with `pip install langchain-community`."
+        ) from e
     if isinstance(spec, str):
         for conversion in (
             OpenAPISpec.from_url,
@@ -1,6 +1,7 @@
-from typing import Any, Dict, List, Optional, TypedDict, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict, Union
 
-from langchain_community.utilities.sql_database import SQLDatabase
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import BasePromptTemplate
@@ -8,6 +9,9 @@ from langchain_core.runnables import Runnable, RunnablePassthrough
 
 from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS
 
+if TYPE_CHECKING:
+    from langchain_community.utilities.sql_database import SQLDatabase
+
 
 def _strip(text: str) -> str:
     return text.strip()
@@ -1,3 +1,27 @@
-from langchain_community.document_loaders.parsers.language.python import PythonSegmenter
+from typing import TYPE_CHECKING, Any
 
-__all__ = ["PythonSegmenter"]
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
+    from langchain_community.document_loaders.parsers.language.python import (
+        PythonSegmenter,
+    )
+
+# Create a way to dynamically look up deprecated imports.
+# Used to consolidate logic for raising deprecation warnings and
+# handling optional imports.
+DEPRECATED_LOOKUP = {
+    "PythonSegmenter": "langchain_community.document_loaders.parsers.language.python"
+}
+
+_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _import_attribute(name)
+
+
+__all__ = [
+    "PythonSegmenter",
+]
@@ -1,5 +1,26 @@
+from typing import TYPE_CHECKING, Any
+
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
 from langchain_community.document_loaders.pyspark_dataframe import (
     PySparkDataFrameLoader,
 )
 
+
+# Create a way to dynamically look up deprecated imports.
+# Used to consolidate logic for raising deprecation warnings and
+# handling optional imports.
+DEPRECATED_LOOKUP = {
+    "PySparkDataFrameLoader": "langchain_community.document_loaders.pyspark_dataframe"
+}
+
+_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _import_attribute(name)
+
+
 __all__ = ["PySparkDataFrameLoader"]
@@ -1,3 +1,22 @@
+from typing import TYPE_CHECKING, Any
+
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
 from langchain_community.document_loaders.python import PythonLoader
 
+
+# Create a way to dynamically look up deprecated imports.
+# Used to consolidate logic for raising deprecation warnings and
+# handling optional imports.
+DEPRECATED_LOOKUP = {"PythonLoader": "langchain_community.document_loaders.python"}
+
+_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _import_attribute(name)
+
+
 __all__ = ["PythonLoader"]
@@ -1,5 +1,21 @@
-from langchain_community.embeddings.sentence_transformer import (
-    SentenceTransformerEmbeddings,
-)
+from typing import TYPE_CHECKING, Any
+
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
+    from langchain_community.embeddings import SentenceTransformerEmbeddings
 
+# Create a way to dynamically look up deprecated imports.
+# Used to consolidate logic for raising deprecation warnings and
+# handling optional imports.
+DEPRECATED_LOOKUP = {"SentenceTransformerEmbeddings": "langchain_community.embeddings"}
+
+_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _import_attribute(name)
+
+
 __all__ = ["SentenceTransformerEmbeddings"]
@@ -5,8 +5,6 @@ import logging
 import re
 from typing import Any, Dict, List, Optional, Union
 
-from langchain_community.chat_models.azure_openai import AzureChatOpenAI
-from langchain_community.chat_models.openai import ChatOpenAI
 from langchain_core.callbacks.manager import Callbacks
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.output_parsers import BaseOutputParser
@@ -258,10 +256,7 @@ class ScoreStringEvalChain(StringEvaluator, LLMEvalChain, LLMChain):
             ValueError: If the input variables are not as expected.
 
         """
-        if not (
-            isinstance(llm, (ChatOpenAI, AzureChatOpenAI))
-            and llm.model_name.startswith("gpt-4")
-        ):
+        if not (hasattr(llm, "model_name") and not llm.model_name.startswith("gpt-4")):
             logger.warning(
                 "This chain was only tested with GPT-4. \
 Performance may be significantly worse with other models."
@@ -11,12 +11,33 @@ Importantly, Index keeps on working even if the content being written is derived
 via a set of transformations from some source content (e.g., indexing children
 documents that were derived from parent documents by chunking.)
 """
-from langchain_community.graphs.index_creator import GraphIndexCreator
+from typing import TYPE_CHECKING, Any
+
 from langchain_core.indexing.api import IndexingResult, aindex, index
 
+from langchain._api import create_importer
 from langchain.indexes._sql_record_manager import SQLRecordManager
 from langchain.indexes.vectorstore import VectorstoreIndexCreator
 
+if TYPE_CHECKING:
+    from langchain_community.graphs.index_creator import GraphIndexCreator
+
+
+# Create a way to dynamically look up deprecated imports.
+# Used to consolidate logic for raising deprecation warnings and
+# handling optional imports.
+DEPRECATED_LOOKUP = {
+    "GraphIndexCreator": "langchain_community.graphs.index_creator",
+}
+
+_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _import_attribute(name)
+
+
 __all__ = [
     # Keep sorted
     "aindex",
@@ -1,5 +1,27 @@
-"""Graph Index Creator."""
+"""**Graphs** provide a natural language interface to graph databases."""
+
+from typing import TYPE_CHECKING, Any
+
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
 from langchain_community.graphs.index_creator import GraphIndexCreator
 from langchain_community.graphs.networkx_graph import NetworkxEntityGraph
 
+
+# Create a way to dynamically look up deprecated imports.
+# Used to consolidate logic for raising deprecation warnings and
+# handling optional imports.
+DEPRECATED_LOOKUP = {
+    "GraphIndexCreator": "langchain_community.graphs.index_creator",
+    "NetworkxEntityGraph": "langchain_community.graphs.networkx_graph",
+}
+
+_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _import_attribute(name)
+
+
 __all__ = ["GraphIndexCreator", "NetworkxEntityGraph"]
@@ -1,8 +1,9 @@
 # flake8: noqa
 
-from langchain_community.graphs.networkx_graph import KG_TRIPLE_DELIMITER
 from langchain_core.prompts.prompt import PromptTemplate
 
+KG_TRIPLE_DELIMITER = "<|>"
+
 _DEFAULT_KNOWLEDGE_TRIPLE_EXTRACTION_TEMPLATE = (
     "You are a networked intelligence helping a human track knowledge triples"
     " about all relevant people, things, concepts, etc. and integrating"
@@ -1,6 +1,5 @@
 from typing import Any, Dict, List, Optional, Type
 
-from langchain_community.vectorstores.inmemory import InMemoryVectorStore
 from langchain_core.document_loaders import BaseLoader
 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
@@ -117,10 +116,30 @@ class VectorStoreIndexWrapper(BaseModel):
         return await chain.ainvoke({chain.question_key: question})
 
 
+def _get_in_memory_vectorstore() -> Type[VectorStore]:
+    """Get the InMemoryVectorStore."""
+    import warnings
+
+    try:
+        from langchain_community.vectorstores.inmemory import InMemoryVectorStore
+    except ImportError:
+        raise ImportError(
+            "Please install langchain-community to use the InMemoryVectorStore."
+        )
+    warnings.warn(
+        "Using InMemoryVectorStore as the default vectorstore."
+        "This memory store won't persist data. You should explicitly"
+        "specify a vectorstore when using VectorstoreIndexCreator"
+    )
+    return InMemoryVectorStore
+
+
 class VectorstoreIndexCreator(BaseModel):
     """Logic for creating indexes."""
 
-    vectorstore_cls: Type[VectorStore] = InMemoryVectorStore
+    vectorstore_cls: Type[VectorStore] = Field(
+        default_factory=_get_in_memory_vectorstore
+    )
     embedding: Embeddings
     text_splitter: TextSplitter = Field(default_factory=_get_default_text_splitter)
     vectorstore_kwargs: dict = Field(default_factory=dict)
@@ -1,3 +1,23 @@
-from langchain_community.llms.titan_takeoff import TitanTakeoff as TitanTakeoffPro
+from typing import TYPE_CHECKING, Any
 
-__all__ = ["TitanTakeoffPro"]
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
+    from langchain_community.llms import TitanTakeoffPro
+
+# Create a way to dynamically look up deprecated imports.
+# Used to consolidate logic for raising deprecation warnings and
+# handling optional imports.
+DEPRECATED_LOOKUP = {"TitanTakeoffPro": "langchain_community.llms"}
+
+_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _import_attribute(name)
+
+
+__all__ = [
+    "TitanTakeoffPro",
+]
@@ -1,4 +1,21 @@
 """For backwards compatibility."""
+from typing import TYPE_CHECKING, Any
+
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
 from langchain_community.utilities.python import PythonREPL
 
+
+_importer = create_importer(
+    __package__,
+    deprecated_lookups={"PythonREPL": "langchain_community.utilities.python"},
+)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _importer(name)
+
+
 __all__ = ["PythonREPL"]
@@ -1,7 +0,0 @@
-from langchain_community.tools.ainetwork.app import (
-    AINAppOps,
-    AppOperationType,
-    AppSchema,
-)
-
-__all__ = ["AppOperationType", "AppSchema", "AINAppOps"]
@@ -1,3 +0,0 @@
-from langchain_community.tools.ainetwork.base import AINBaseTool, OperationType
-
-__all__ = ["OperationType", "AINBaseTool"]
@@ -1,3 +0,0 @@
-from langchain_community.tools.ainetwork.owner import AINOwnerOps, RuleSchema
-
-__all__ = ["RuleSchema", "AINOwnerOps"]
@@ -1,3 +0,0 @@
-from langchain_community.tools.ainetwork.rule import AINRuleOps, RuleSchema
-
-__all__ = ["RuleSchema", "AINRuleOps"]
@@ -1,3 +0,0 @@
-from langchain_community.tools.ainetwork.transfer import AINTransfer, TransferSchema
-
-__all__ = ["TransferSchema", "AINTransfer"]
@@ -1,3 +0,0 @@
-from langchain_community.tools.ainetwork.value import AINValueOps, ValueSchema
-
-__all__ = ["ValueSchema", "AINValueOps"]
@@ -1,3 +1,23 @@
+"""For backwards compatibility."""
+from typing import TYPE_CHECKING, Any
+
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
 from langchain_community.tools.sql_database.prompt import QUERY_CHECKER
 
+
+_importer = create_importer(
+    __package__,
+    deprecated_lookups={
+        "QUERY_CHECKER": "langchain_community.tools.sql_database.prompt",
+    },
+)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _importer(name)
+
+
 __all__ = ["QUERY_CHECKER"]
@@ -1,3 +1,21 @@
+"""For backwards compatibility."""
+from typing import TYPE_CHECKING, Any
+
+from langchain._api import create_importer
+
+if TYPE_CHECKING:
 from langchain_community.utilities.python import PythonREPL
 
+
+_importer = create_importer(
+    __package__,
+    deprecated_lookups={"PythonREPL": "langchain_community.utilities.python"},
+)
+
+
+def __getattr__(name: str) -> Any:
+    """Look up attributes dynamically."""
+    return _importer(name)
+
+
 __all__ = ["PythonREPL"]