Mirror of https://github.com/hwchase17/langchain.git (synced 2025-05-16 20:42:01 +00:00)
langchain[patch]: Revert breaking change until 0.2 release (#21256)
Reverts a minor breaking change until the 0.2 release. The default fallbacks (OpenAI, ChatOpenAI, OpenAIEmbeddings, InMemoryVectorStore) are restored in the vector store toolkits, NatBotChain.from_default, get_openapi_chain, MultiRetrievalQAChain, and the vector store index helpers, replacing the NotImplementedError/ValueError guards that had required callers to pass these objects explicitly.
This commit is contained in:
parent 66a1e3f083
commit ba4a309d98
@@ -1,11 +1,18 @@
 """Toolkit for interacting with a vector store."""
 from typing import List
 
+from langchain_community.agent_toolkits.base import BaseToolkit
+from langchain_community.llms.openai import OpenAI
+from langchain_community.tools.vectorstore.tool import (
+    VectorStoreQATool,
+    VectorStoreQAWithSourcesTool,
+)
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.pydantic_v1 import BaseModel, Field
-from langchain_core.tools import BaseTool, BaseToolkit
 from langchain_core.vectorstores import VectorStore
 
+from langchain.tools import BaseTool
+
 
 class VectorStoreInfo(BaseModel):
     """Information about a VectorStore."""
@@ -24,7 +31,7 @@ class VectorStoreToolkit(BaseToolkit):
     """Toolkit for interacting with a Vector Store."""
 
     vectorstore_info: VectorStoreInfo = Field(exclude=True)
-    llm: BaseLanguageModel
+    llm: BaseLanguageModel = Field(default_factory=lambda: OpenAI(temperature=0))
 
     class Config:
         """Configuration for this pydantic object."""
@@ -33,15 +40,6 @@ class VectorStoreToolkit(BaseToolkit):
 
     def get_tools(self) -> List[BaseTool]:
         """Get the tools in the toolkit."""
-        try:
-            from langchain_community.tools.vectorstore.tool import (
-                VectorStoreQATool,
-                VectorStoreQAWithSourcesTool,
-            )
-        except ImportError:
-            raise ImportError(
-                "You need to install langchain-community to use this toolkit."
-            )
         description = VectorStoreQATool.get_description(
             self.vectorstore_info.name, self.vectorstore_info.description
         )
@@ -67,7 +65,7 @@ class VectorStoreRouterToolkit(BaseToolkit):
     """Toolkit for routing between Vector Stores."""
 
     vectorstores: List[VectorStoreInfo] = Field(exclude=True)
-    llm: BaseLanguageModel
+    llm: BaseLanguageModel = Field(default_factory=lambda: OpenAI(temperature=0))
 
     class Config:
         """Configuration for this pydantic object."""
@@ -77,14 +75,6 @@ class VectorStoreRouterToolkit(BaseToolkit):
     def get_tools(self) -> List[BaseTool]:
         """Get the tools in the toolkit."""
         tools: List[BaseTool] = []
-        try:
-            from langchain_community.tools.vectorstore.tool import (
-                VectorStoreQATool,
-            )
-        except ImportError:
-            raise ImportError(
-                "You need to install langchain-community to use this toolkit."
-            )
         for vectorstore_info in self.vectorstores:
             description = VectorStoreQATool.get_description(
                 vectorstore_info.name, vectorstore_info.description
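With the defaults restored, VectorStoreToolkit and VectorStoreRouterToolkit again fall back to OpenAI(temperature=0) when no llm is supplied, and their tool imports no longer sit behind a try/except guard. A minimal usage sketch, assuming the 0.1.x import paths, an OpenAI API key in the environment, and illustrative texts:

from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.vectorstores.inmemory import InMemoryVectorStore
from langchain.agents.agent_toolkits.vectorstore.toolkit import (
    VectorStoreInfo,
    VectorStoreToolkit,
)

# Build a small, non-persistent vector store to wrap (illustrative data).
store = InMemoryVectorStore.from_texts(
    ["LangChain ships toolkits for querying vector stores."],
    OpenAIEmbeddings(),
)

info = VectorStoreInfo(
    name="docs",
    description="snippets about LangChain",
    vectorstore=store,
)

# No llm argument: the reverted Field(default_factory=...) supplies
# OpenAI(temperature=0) instead of failing pydantic validation.
toolkit = VectorStoreToolkit(vectorstore_info=info)
tools = toolkit.get_tools()  # [VectorStoreQATool, VectorStoreQAWithSourcesTool]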
@@ -4,6 +4,7 @@ from __future__ import annotations
 import warnings
 from typing import Any, Dict, List, Optional
 
+from langchain_community.llms.openai import OpenAI
 from langchain_core.callbacks import CallbackManagerForChainRun
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.pydantic_v1 import Extra, root_validator
@@ -67,11 +68,8 @@ class NatBotChain(Chain):
     @classmethod
     def from_default(cls, objective: str, **kwargs: Any) -> NatBotChain:
         """Load with default LLMChain."""
-        raise NotImplementedError(
-            "This method is no longer implemented. Please use from_llm."
-            "llm = OpenAI(temperature=0.5, best_of=10, n=3, max_tokens=50)"
-            "For example, NatBotChain.from_llm(llm, objective)"
-        )
+        llm = OpenAI(temperature=0.5, best_of=10, n=3, max_tokens=50)
+        return cls.from_llm(llm, objective, **kwargs)
 
     @classmethod
     def from_llm(
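With the NotImplementedError removed, NatBotChain.from_default once again constructs its historical default completion model on the caller's behalf. A minimal sketch, assuming the 0.1.x import path and an OpenAI API key; the objective string is illustrative:

from langchain.chains.natbot.base import NatBotChain

# Restored behaviour: from_default internally builds
# OpenAI(temperature=0.5, best_of=10, n=3, max_tokens=50).
chain = NatBotChain.from_default(objective="Find a vacuum cleaner under $100.")

# The explicit form keeps working and avoids the implicit default:
# from langchain_community.llms.openai import OpenAI
# llm = OpenAI(temperature=0.5, best_of=10, n=3, max_tokens=50)
# chain = NatBotChain.from_llm(llm, "Find a vacuum cleaner under $100.")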
@@ -6,6 +6,8 @@ from collections import defaultdict
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
 
 import requests
+from langchain_community.chat_models import ChatOpenAI
+from langchain_community.utilities.openapi import OpenAPISpec
 from langchain_core.callbacks import CallbackManagerForChainRun
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.output_parsers.openai_functions import JsonOutputFunctionsParser
@@ -19,7 +21,6 @@ from langchain.chains.sequential import SequentialChain
 from langchain.tools import APIOperation
 
 if TYPE_CHECKING:
-    from langchain_community.utilities.openapi import OpenAPISpec
     from openapi_pydantic import Parameter
 
 
@@ -255,13 +256,6 @@ def get_openapi_chain(
         prompt: Main prompt template to use.
         request_chain: Chain for taking the functions output and executing the request.
     """
-    try:
-        from langchain_community.utilities.openapi import OpenAPISpec
-    except ImportError as e:
-        raise ImportError(
-            "Could not import langchain_community.utilities.openapi. "
-            "Please install it with `pip install langchain-community`."
-        ) from e
     if isinstance(spec, str):
         for conversion in (
             OpenAPISpec.from_url,
@@ -278,12 +272,9 @@
     if isinstance(spec, str):
         raise ValueError(f"Unable to parse spec from source {spec}")
     openai_fns, call_api_fn = openapi_spec_to_openai_fn(spec)
-    if not llm:
-        raise ValueError(
-            "Must provide an LLM for this chain.For example,\n"
-            "from langchain_openai import ChatOpenAI\n"
-            "llm = ChatOpenAI()\n"
-        )
+    llm = llm or ChatOpenAI(
+        model="gpt-3.5-turbo-0613",
+    )
     prompt = prompt or ChatPromptTemplate.from_template(
         "Use the provided API's to respond to this user query:\n\n{query}"
     )
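After this hunk, get_openapi_chain no longer raises ValueError when llm is omitted; it falls back to a function-calling ChatOpenAI model as before. A minimal sketch; the spec URL is purely illustrative and OpenAI credentials plus langchain-community are assumed:

from langchain.chains.openai_functions.openapi import get_openapi_chain

# Hypothetical spec URL, for illustration only.
spec_url = "https://example.com/openapi.json"

# No llm argument: the chain now defaults to ChatOpenAI(model="gpt-3.5-turbo-0613").
chain = get_openapi_chain(spec_url)

# An explicit chat model can still be passed to override the default:
# from langchain_community.chat_models import ChatOpenAI
# chain = get_openapi_chain(spec_url, llm=ChatOpenAI(model="gpt-4"))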
@@ -3,6 +3,7 @@ from __future__ import annotations
 
 from typing import Any, Dict, List, Mapping, Optional
 
+from langchain_community.chat_models import ChatOpenAI
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.prompts import PromptTemplate
 from langchain_core.retrievers import BaseRetriever
@@ -41,8 +42,6 @@ class MultiRetrievalQAChain(MultiRouteChain):
         default_retriever: Optional[BaseRetriever] = None,
         default_prompt: Optional[PromptTemplate] = None,
         default_chain: Optional[Chain] = None,
-        *,
-        default_chain_llm: Optional[BaseLanguageModel] = None,
         **kwargs: Any,
     ) -> MultiRetrievalQAChain:
         if default_prompt and not default_retriever:
@@ -79,20 +78,8 @@ class MultiRetrievalQAChain(MultiRouteChain):
             prompt = PromptTemplate(
                 template=prompt_template, input_variables=["history", "query"]
             )
-            if default_chain_llm is None:
-                raise NotImplementedError(
-                    "conversation_llm must be provided if default_chain is not "
-                    "specified. This API has been changed to avoid instantiating "
-                    "default LLMs on behalf of users."
-                    "You can provide a conversation LLM like so:\n"
-                    "from langchain_openai import ChatOpenAI\n"
-                    "llm = ChatOpenAI()"
-                )
             _default_chain = ConversationChain(
-                llm=default_chain_llm,
-                prompt=prompt,
-                input_key="query",
-                output_key="result",
+                llm=ChatOpenAI(), prompt=prompt, input_key="query", output_key="result"
             )
         return cls(
             router_chain=router_chain,
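The from_retrievers signature also loses the keyword-only default_chain_llm parameter again; when no default_chain is supplied, the fallback ConversationChain is built around ChatOpenAI(). A minimal sketch, with an illustrative in-memory retriever and OpenAI credentials assumed:

from langchain_community.chat_models import ChatOpenAI
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.vectorstores.inmemory import InMemoryVectorStore
from langchain.chains.router.multi_retrieval_qa import MultiRetrievalQAChain

# Illustrative single-document retriever.
retriever = InMemoryVectorStore.from_texts(
    ["LangChain is a framework for building LLM applications."],
    OpenAIEmbeddings(),
).as_retriever()

retriever_infos = [
    {
        "name": "langchain docs",
        "description": "good for questions about LangChain itself",
        "retriever": retriever,
    }
]

# No default_chain and no default_chain_llm: the reverted code silently
# instantiates ChatOpenAI() for the fallback conversation chain.
chain = MultiRetrievalQAChain.from_retrievers(ChatOpenAI(), retriever_infos)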
@@ -1,6 +1,9 @@
 from typing import Any, Dict, List, Optional, Type
 
-from langchain_core.document_loaders import BaseLoader
+from langchain_community.document_loaders.base import BaseLoader
+from langchain_community.embeddings.openai import OpenAIEmbeddings
+from langchain_community.llms.openai import OpenAI
+from langchain_community.vectorstores.inmemory import InMemoryVectorStore
 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
 from langchain_core.language_models import BaseLanguageModel
@@ -35,14 +38,7 @@ class VectorStoreIndexWrapper(BaseModel):
         **kwargs: Any,
     ) -> str:
         """Query the vectorstore."""
-        if llm is None:
-            raise NotImplementedError(
-                "This API has been changed to require an LLM. "
-                "Please provide an llm to use for querying the vectorstore.\n"
-                "For example,\n"
-                "from langchain_openai import OpenAI\n"
-                "llm = OpenAI(temperature=0)"
-            )
+        llm = llm or OpenAI(temperature=0)
         retriever_kwargs = retriever_kwargs or {}
         chain = RetrievalQA.from_chain_type(
             llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
@@ -57,14 +53,7 @@ class VectorStoreIndexWrapper(BaseModel):
         **kwargs: Any,
     ) -> str:
         """Query the vectorstore."""
-        if llm is None:
-            raise NotImplementedError(
-                "This API has been changed to require an LLM. "
-                "Please provide an llm to use for querying the vectorstore.\n"
-                "For example,\n"
-                "from langchain_openai import OpenAI\n"
-                "llm = OpenAI(temperature=0)"
-            )
+        llm = llm or OpenAI(temperature=0)
         retriever_kwargs = retriever_kwargs or {}
         chain = RetrievalQA.from_chain_type(
             llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
@@ -79,14 +68,7 @@ class VectorStoreIndexWrapper(BaseModel):
         **kwargs: Any,
     ) -> dict:
         """Query the vectorstore and get back sources."""
-        if llm is None:
-            raise NotImplementedError(
-                "This API has been changed to require an LLM. "
-                "Please provide an llm to use for querying the vectorstore.\n"
-                "For example,\n"
-                "from langchain_openai import OpenAI\n"
-                "llm = OpenAI(temperature=0)"
-            )
+        llm = llm or OpenAI(temperature=0)
         retriever_kwargs = retriever_kwargs or {}
         chain = RetrievalQAWithSourcesChain.from_chain_type(
             llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
@@ -101,14 +83,7 @@ class VectorStoreIndexWrapper(BaseModel):
         **kwargs: Any,
     ) -> dict:
         """Query the vectorstore and get back sources."""
-        if llm is None:
-            raise NotImplementedError(
-                "This API has been changed to require an LLM. "
-                "Please provide an llm to use for querying the vectorstore.\n"
-                "For example,\n"
-                "from langchain_openai import OpenAI\n"
-                "llm = OpenAI(temperature=0)"
-            )
+        llm = llm or OpenAI(temperature=0)
        retriever_kwargs = retriever_kwargs or {}
         chain = RetrievalQAWithSourcesChain.from_chain_type(
             llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
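All four query helpers on VectorStoreIndexWrapper get the same treatment: the llm argument is optional again and falls back to OpenAI(temperature=0) instead of raising NotImplementedError. A minimal sketch, assuming the 0.1.x import paths, OpenAI credentials, and an illustrative one-document store:

from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.vectorstores.inmemory import InMemoryVectorStore
from langchain.indexes.vectorstore import VectorStoreIndexWrapper

# Illustrative non-persistent store with a single document.
store = InMemoryVectorStore.from_texts(
    ["LangChain is a framework for building LLM applications."],
    OpenAIEmbeddings(),
)
index = VectorStoreIndexWrapper(vectorstore=store)

# No llm argument: each helper now uses the default OpenAI(temperature=0).
print(index.query("What is LangChain?"))
print(index.query_with_sources("What is LangChain?"))

# Passing a model explicitly still works:
# from langchain_community.chat_models import ChatOpenAI
# print(index.query("What is LangChain?", llm=ChatOpenAI()))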
@@ -116,31 +91,11 @@ class VectorStoreIndexWrapper(BaseModel):
         return await chain.ainvoke({chain.question_key: question})
 
 
-def _get_in_memory_vectorstore() -> Type[VectorStore]:
-    """Get the InMemoryVectorStore."""
-    import warnings
-
-    try:
-        from langchain_community.vectorstores.inmemory import InMemoryVectorStore
-    except ImportError:
-        raise ImportError(
-            "Please install langchain-community to use the InMemoryVectorStore."
-        )
-    warnings.warn(
-        "Using InMemoryVectorStore as the default vectorstore."
-        "This memory store won't persist data. You should explicitly"
-        "specify a vectorstore when using VectorstoreIndexCreator"
-    )
-    return InMemoryVectorStore
-
-
 class VectorstoreIndexCreator(BaseModel):
     """Logic for creating indexes."""
 
-    vectorstore_cls: Type[VectorStore] = Field(
-        default_factory=_get_in_memory_vectorstore
-    )
-    embedding: Embeddings
+    vectorstore_cls: Type[VectorStore] = InMemoryVectorStore
+    embedding: Embeddings = Field(default_factory=OpenAIEmbeddings)
     text_splitter: TextSplitter = Field(default_factory=_get_default_text_splitter)
     vectorstore_kwargs: dict = Field(default_factory=dict)
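Finally, VectorstoreIndexCreator regains its class-level defaults: the vector store class defaults to the non-persistent InMemoryVectorStore, the embedding to OpenAIEmbeddings, and the _get_in_memory_vectorstore helper (with its warning) is dropped. A minimal sketch; the file name is illustrative and OpenAI credentials plus langchain-community are assumed:

from langchain_community.document_loaders import TextLoader
from langchain.indexes.vectorstore import VectorstoreIndexCreator

# No embedding or vectorstore_cls arguments needed after the revert.
creator = VectorstoreIndexCreator()

# Illustrative loader; any document loader works here.
index = creator.from_loaders([TextLoader("notes.txt")])
print(index.query("What do the notes say?"))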