docs, templates: update schema imports to core (#17885)

- chat models, messages
- documents
- AgentAction/AgentFinish
- BaseRetriever, Document
- StrOutputParser
- more messages
- BaseMessage
- format_document
- BaseOutputParser
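
All of the changes below follow the same substitution: names that used to be re-exported through the `langchain.schema` umbrella module are now imported from the dedicated `langchain_core` modules. A rough sketch of the mapping, assembled from the hunks in this commit (grouping everything into one snippet is purely illustrative, not a file from the repo):

```python
# Old style: everything pulled from the langchain.schema umbrella module
# from langchain.schema import (
#     AIMessage, BaseMessage, HumanMessage,   # messages
#     Document,                               # documents
#     AgentAction, AgentFinish,               # agent actions
#     BaseRetriever,                          # retrievers
#     BaseOutputParser, StrOutputParser,      # output parsers
#     format_document,                        # prompt helpers
# )

# New style: import each name from its dedicated langchain_core module
from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.documents import Document
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.output_parsers import BaseOutputParser, StrOutputParser
from langchain_core.prompts import format_document
from langchain_core.retrievers import BaseRetriever
```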

---------

Co-authored-by: Bagatur <baskaryan@gmail.com>
Authored by: Erick Friis
Date: 2024-02-22 15:58:44 -08:00
Committed by: GitHub
Parent: 971d29e718
Commit: ed789be8f4
148 changed files with 237 additions and 206 deletions


@@ -3,8 +3,8 @@ import os
 import cassio
 import langchain
 from langchain.cache import CassandraCache
-from langchain.schema import BaseMessage
 from langchain_community.chat_models import ChatOpenAI
+from langchain_core.messages import BaseMessage
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.runnables import RunnableLambda


@@ -1,7 +1,7 @@
 from langchain import hub
-from langchain.schema import StrOutputParser
 from langchain_community.chat_models import ChatAnthropic
 from langchain_community.utilities import WikipediaAPIWrapper
+from langchain_core.output_parsers import StrOutputParser
 from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import RunnableLambda, RunnablePassthrough


@@ -69,7 +69,7 @@ from functools import partial
 from typing import Dict, Optional, Callable, List
 from langserve import RemoteRunnable
 from langchain.callbacks.manager import tracing_v2_enabled
-from langchain.schema import BaseMessage, AIMessage, HumanMessage
+from langchain_core.messages import BaseMessage, AIMessage, HumanMessage
 # Update with the URL provided by your LangServe server
 chain = RemoteRunnable("http://127.0.0.1:8031/chat-bot-feedback")


@@ -3,10 +3,10 @@ from typing import List, Optional
 from langchain.chains.openai_functions import (
     create_structured_output_chain,
 )
-from langchain.schema import Document
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.graphs import Neo4jGraph
 from langchain_community.graphs.graph_document import GraphDocument
+from langchain_core.documents import Document
 from langchain_core.prompts import ChatPromptTemplate
 from neo4j_generation.utils import (


@@ -5,9 +5,9 @@ from langchain.agents.format_scratchpad import format_to_openai_function_message
 from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
 from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
 from langchain.pydantic_v1 import BaseModel, Field
-from langchain.schema import AIMessage, HumanMessage
 from langchain.tools.render import format_tool_to_openai_function
 from langchain_community.chat_models import ChatOpenAI
+from langchain_core.messages import AIMessage, HumanMessage
 from neo4j_semantic_layer.information_tool import InformationTool
 from neo4j_semantic_layer.memory_tool import MemoryTool
@@ -45,9 +45,9 @@ def _format_chat_history(chat_history: List[Tuple[str, str]]):
 agent = (
     {
         "input": lambda x: x["input"],
-        "chat_history": lambda x: _format_chat_history(x["chat_history"])
-        if x.get("chat_history")
-        else [],
+        "chat_history": lambda x: (
+            _format_chat_history(x["chat_history"]) if x.get("chat_history") else []
+        ),
         "agent_scratchpad": lambda x: format_to_openai_function_messages(
             x["intermediate_steps"]
         ),


@@ -8,9 +8,9 @@ from langchain.agents.output_parsers import (
 )
 from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
 from langchain.pydantic_v1 import BaseModel, Field
-from langchain.schema import AIMessage, HumanMessage
 from langchain.tools.render import render_text_description_and_args
 from langchain_community.chat_models import ChatOllama
+from langchain_core.messages import AIMessage, HumanMessage
 from neo4j_semantic_ollama.information_tool import InformationTool
 from neo4j_semantic_ollama.memory_tool import MemoryTool
@@ -87,9 +87,9 @@ agent = (
     {
         "input": lambda x: x["input"],
         "agent_scratchpad": lambda x: format_log_to_messages(x["intermediate_steps"]),
-        "chat_history": lambda x: _format_chat_history(x["chat_history"])
-        if x.get("chat_history")
-        else [],
+        "chat_history": lambda x: (
+            _format_chat_history(x["chat_history"]) if x.get("chat_history") else []
+        ),
     }
     | prompt
     | chat_model_with_stop


@@ -1,8 +1,8 @@
 from typing import Any, Dict, List, Union
 from langchain.memory import ChatMessageHistory
-from langchain.schema import AIMessage, HumanMessage
 from langchain_community.graphs import Neo4jGraph
+from langchain_core.messages import AIMessage, HumanMessage
 graph = Neo4jGraph()


@@ -6,13 +6,13 @@ from langchain.agents import (
 )
 from langchain.agents.format_scratchpad import format_to_openai_functions
 from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
-from langchain.schema import Document
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.tools.convert_to_openai import format_tool_to_openai_function
 from langchain_community.tools.tavily_search import TavilySearchResults
 from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper
 from langchain_community.vectorstores import FAISS
+from langchain_core.documents import Document
 from langchain_core.messages import AIMessage, HumanMessage
 from langchain_core.prompts import (
     ChatPromptTemplate,


@@ -2,13 +2,16 @@ import os
 from operator import itemgetter
 from typing import List, Tuple
-from langchain.schema import AIMessage, HumanMessage, format_document
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.vectorstores.zep import CollectionConfig, ZepVectorStore
 from langchain_core.documents import Document
-from langchain_core.messages import BaseMessage
+from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
 from langchain_core.output_parsers import StrOutputParser
-from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
+from langchain_core.prompts import (
+    ChatPromptTemplate,
+    MessagesPlaceholder,
+    format_document,
+)
 from langchain_core.prompts.prompt import PromptTemplate
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import (


@@ -2,11 +2,15 @@ import os
 from operator import itemgetter
 from typing import List, Tuple
-from langchain.schema import AIMessage, HumanMessage, format_document
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.embeddings import OpenAIEmbeddings
+from langchain_core.messages import AIMessage, HumanMessage
 from langchain_core.output_parsers import StrOutputParser
-from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
+from langchain_core.prompts import (
+    ChatPromptTemplate,
+    MessagesPlaceholder,
+    format_document,
+)
 from langchain_core.prompts.prompt import PromptTemplate
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import (


@@ -1,11 +1,12 @@
 from operator import itemgetter
 from typing import List, Optional, Tuple
-from langchain.schema import BaseMessage, format_document
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.embeddings import HuggingFaceEmbeddings
 from langchain_community.vectorstores.elasticsearch import ElasticsearchStore
+from langchain_core.messages import BaseMessage
 from langchain_core.output_parsers import StrOutputParser
+from langchain_core.prompts import format_document
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import RunnableParallel, RunnablePassthrough


@@ -1,11 +1,11 @@
 import json
 from pathlib import Path
-from langchain.schema import Document
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores import Chroma
+from langchain_core.documents import Document
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel


@@ -7,10 +7,10 @@ from langchain.retrievers import (
     PubMedRetriever,
     WikipediaRetriever,
 )
-from langchain.schema import StrOutputParser
 from langchain.utils.math import cosine_similarity
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.embeddings import OpenAIEmbeddings
+from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import (


@@ -8,9 +8,9 @@ from langchain.retrievers import (
     PubMedRetriever,
     WikipediaRetriever,
 )
-from langchain.schema import StrOutputParser
 from langchain.utils.openai_functions import convert_pydantic_to_openai_function
 from langchain_community.chat_models import ChatOpenAI
+from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import (


@@ -3,11 +3,11 @@ from operator import itemgetter
 from typing import List, Tuple
 from langchain.retrievers import SelfQueryRetriever
-from langchain.schema import format_document
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores.elasticsearch import ElasticsearchStore
 from langchain_core.output_parsers import StrOutputParser
+from langchain_core.prompts import format_document
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import RunnableParallel, RunnablePassthrough


@@ -4,12 +4,16 @@ from operator import itemgetter
 from typing import List, Optional, Tuple
 from dotenv import find_dotenv, load_dotenv
-from langchain.schema import AIMessage, HumanMessage, format_document
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores.timescalevector import TimescaleVector
+from langchain_core.messages import AIMessage, HumanMessage
 from langchain_core.output_parsers import StrOutputParser
-from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
+from langchain_core.prompts import (
+    ChatPromptTemplate,
+    MessagesPlaceholder,
+    format_document,
+)
 from langchain_core.prompts.prompt import PromptTemplate
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import (
@@ -147,12 +151,16 @@ _inputs = RunnableParallel(
 )
 _datetime_to_string = RunnablePassthrough.assign(
-    start_date=lambda x: x.get("start_date", None).isoformat()
-    if x.get("start_date", None) is not None
-    else None,
-    end_date=lambda x: x.get("end_date", None).isoformat()
-    if x.get("end_date", None) is not None
-    else None,
+    start_date=lambda x: (
+        x.get("start_date", None).isoformat()
+        if x.get("start_date", None) is not None
+        else None
+    ),
+    end_date=lambda x: (
+        x.get("end_date", None).isoformat()
+        if x.get("end_date", None) is not None
+        else None
+    ),
 ).with_types(input_type=ChatHistory)
 chain = (


@@ -5,12 +5,13 @@ from langchain.agents import AgentExecutor
 from langchain.agents.format_scratchpad import format_log_to_str
 from langchain.agents.output_parsers import ReActJsonSingleInputOutputParser
 from langchain.callbacks.manager import CallbackManagerForRetrieverRun
-from langchain.schema import BaseRetriever, Document
 from langchain.tools.render import render_text_description
 from langchain.tools.retriever import create_retriever_tool
 from langchain_community.chat_models.fireworks import ChatFireworks
 from langchain_community.utilities.arxiv import ArxivAPIWrapper
+from langchain_core.documents import Document
 from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.retrievers import BaseRetriever
 MODEL_ID = "accounts/fireworks/models/mixtral-8x7b-instruct"


@@ -5,13 +5,14 @@ from langchain.agents import AgentExecutor
 from langchain.agents.format_scratchpad import format_to_openai_function_messages
 from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
 from langchain.callbacks.manager import CallbackManagerForRetrieverRun
-from langchain.schema import BaseRetriever, Document
 from langchain.tools.retriever import create_retriever_tool
 from langchain_community.tools.convert_to_openai import format_tool_to_openai_function
 from langchain_community.utilities.arxiv import ArxivAPIWrapper
+from langchain_core.documents import Document
 from langchain_core.messages import AIMessage, HumanMessage
 from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
 from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.retrievers import BaseRetriever
 from langchain_openai import AzureChatOpenAI


@@ -63,7 +63,7 @@ You can find the documents in the `packages/self-query-qdrant/self_query_qdrant/
 Here is one of the documents:
 ```python
-from langchain.schema import Document
+from langchain_core.documents import Document
 Document(
     page_content="Spaghetti with meatballs and tomato sauce",
@@ -108,7 +108,7 @@ chain = create_chain(
 The same goes for the `initialize` function that creates a Qdrant collection and indexes the documents:
 ```python
-from langchain.schema import Document
+from langchain_core.documents import Document
 from langchain_community.embeddings import HuggingFaceEmbeddings
 from self_query_qdrant.chain import initialize


@@ -3,11 +3,11 @@ from typing import List, Optional
 from langchain.chains.query_constructor.schema import AttributeInfo
 from langchain.retrievers import SelfQueryRetriever
-from langchain.schema import Document, StrOutputParser
 from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.llms import BaseLLM
 from langchain_community.llms.openai import OpenAI
 from langchain_community.vectorstores.qdrant import Qdrant
+from langchain_core.documents import Document, StrOutputParser
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import RunnableParallel, RunnablePassthrough


@@ -1,5 +1,5 @@
 from langchain.chains.query_constructor.schema import AttributeInfo
-from langchain.schema import Document
+from langchain_core.documents import Document
 # Qdrant collection name
 DEFAULT_COLLECTION_NAME = "restaurants"


@@ -1,7 +1,7 @@
 from string import Formatter
 from typing import List
-from langchain.schema import Document
+from langchain_core.documents import Document
 document_template = """
 PASSAGE: {page_content}


@@ -1,4 +1,4 @@
-from langchain.schema import AgentAction, AgentFinish
+from langchain_core.agents import AgentAction, AgentFinish
 def parse_output(message: str):


@@ -2,10 +2,10 @@ from typing import List, Tuple
 from langchain.agents import AgentExecutor
 from langchain.agents.format_scratchpad import format_xml
-from langchain.schema import AIMessage, HumanMessage
 from langchain.tools import DuckDuckGoSearchRun
 from langchain.tools.render import render_text_description
 from langchain_community.chat_models import ChatAnthropic
+from langchain_core.messages import AIMessage, HumanMessage
 from langchain_core.pydantic_v1 import BaseModel, Field
 from xml_agent.prompts import conversational_prompt, parse_output


@@ -1,4 +1,4 @@
-from langchain.schema import AgentAction, AgentFinish
+from langchain_core.agents import AgentAction, AgentFinish
 from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
 template = """You are a helpful assistant. Help the user answer any questions.