Compare commits

...

2 Commits

Author        SHA1          Message                    Date
vowelparrot   215ab8a62e    Update internal imports    2023-06-09 12:24:19 -07:00
vowelparrot   198955575d    Move schema to directory   2023-06-09 12:12:26 -07:00
228 changed files with 368 additions and 255 deletions
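Taken together, these two commits are a mechanical import rewrite: every "from langchain.schema import ..." in the files below becomes "from langchain.schema.base import ...", and the private helper _message_to_dict is renamed to the public message_to_dict. A rough before/after sketch of the import move, assuming a checkout of this branch (released langchain versions keep the old path):

# Old layout (before these commits): names come from the top-level schema module.
# from langchain.schema import AgentAction, OutputParserException

# New layout (this branch): the same names live in langchain.schema.base,
# so callers only change the import path.
from langchain.schema.base import AgentAction, OutputParserException

# AgentAction itself is unchanged; construct and use it as before.
action = AgentAction(tool="search", tool_input="weather in SF", log="calling search")
print(action.tool, action.tool_input)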

View File

@@ -29,7 +29,7 @@ from langchain.input import get_color_mapping
from langchain.prompts.base import BasePromptTemplate
from langchain.prompts.few_shot import FewShotPromptTemplate
from langchain.prompts.prompt import PromptTemplate
-from langchain.schema import (
+from langchain.schema.base import (
AgentAction,
AgentFinish,
BaseMessage,

View File

@@ -20,7 +20,7 @@ from langchain.prompts.chat import (
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
)
-from langchain.schema import AgentAction
+from langchain.schema.base import AgentAction
from langchain.tools.base import BaseTool

View File

@@ -3,7 +3,7 @@ from typing import Union
from langchain.agents.agent import AgentOutputParser
from langchain.agents.chat.prompt import FORMAT_INSTRUCTIONS
-from langchain.schema import AgentAction, AgentFinish, OutputParserException
+from langchain.schema.base import AgentAction, AgentFinish, OutputParserException
FINAL_ANSWER_ACTION = "Final Answer:"

View File

@@ -3,7 +3,7 @@ from typing import Union
from langchain.agents.agent import AgentOutputParser
from langchain.agents.conversational.prompt import FORMAT_INSTRUCTIONS
-from langchain.schema import AgentAction, AgentFinish, OutputParserException
+from langchain.schema.base import AgentAction, AgentFinish, OutputParserException
class ConvoOutputParser(AgentOutputParser):

View File

@@ -23,7 +23,7 @@ from langchain.prompts.chat import (
MessagesPlaceholder,
SystemMessagePromptTemplate,
)
-from langchain.schema import (
+from langchain.schema.base import (
AgentAction,
AIMessage,
BaseMessage,

View File

@@ -5,7 +5,7 @@ from typing import Union
from langchain.agents import AgentOutputParser
from langchain.agents.conversational_chat.prompt import FORMAT_INSTRUCTIONS
from langchain.output_parsers.json import parse_json_markdown
-from langchain.schema import AgentAction, AgentFinish, OutputParserException
+from langchain.schema.base import AgentAction, AgentFinish, OutputParserException
class ConvoOutputParser(AgentOutputParser):

View File

@@ -3,7 +3,7 @@ from typing import Union
from langchain.agents.agent import AgentOutputParser
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
-from langchain.schema import AgentAction, AgentFinish, OutputParserException
+from langchain.schema.base import AgentAction, AgentFinish, OutputParserException
FINAL_ANSWER_ACTION = "Final Answer:"

View File

@@ -2,7 +2,7 @@ import re
from typing import Union
from langchain.agents.agent import AgentOutputParser
-from langchain.schema import AgentAction, AgentFinish, OutputParserException
+from langchain.schema.base import AgentAction, AgentFinish, OutputParserException
class ReActOutputParser(AgentOutputParser):

View File

@@ -1,7 +1,7 @@
from typing import Any, Dict, List, Tuple
from langchain.prompts.chat import ChatPromptTemplate
-from langchain.schema import AgentAction
+from langchain.schema.base import AgentAction
class AgentScratchPadChatPromptTemplate(ChatPromptTemplate):

View File

@@ -1,7 +1,7 @@
from typing import Sequence, Union
from langchain.agents.agent import AgentOutputParser
-from langchain.schema import AgentAction, AgentFinish, OutputParserException
+from langchain.schema.base import AgentAction, AgentFinish, OutputParserException
class SelfAskOutputParser(AgentOutputParser):

View File

@@ -17,7 +17,7 @@ from langchain.prompts.chat import (
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
)
-from langchain.schema import AgentAction
+from langchain.schema.base import AgentAction
from langchain.tools import BaseTool
HUMAN_MESSAGE_TEMPLATE = "{input}\n\n{agent_scratchpad}"

View File

@@ -11,7 +11,7 @@ from langchain.agents.agent import AgentOutputParser
from langchain.agents.structured_chat.prompt import FORMAT_INSTRUCTIONS
from langchain.base_language import BaseLanguageModel
from langchain.output_parsers import OutputFixingParser
-from langchain.schema import AgentAction, AgentFinish, OutputParserException
+from langchain.schema.base import AgentAction, AgentFinish, OutputParserException
logger = logging.getLogger(__name__)

View File

@@ -7,7 +7,7 @@ from typing import List, Optional, Sequence, Set
from pydantic import BaseModel
from langchain.callbacks.manager import Callbacks
-from langchain.schema import BaseMessage, LLMResult, PromptValue, get_buffer_string
+from langchain.schema.base import BaseMessage, LLMResult, PromptValue, get_buffer_string
def _get_token_ids_default_method(text: str) -> List[int]:

View File

@@ -31,7 +31,7 @@ except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from langchain.embeddings.base import Embeddings
-from langchain.schema import Generation
+from langchain.schema.base import Generation
from langchain.vectorstores.redis import Redis as RedisVectorstore
if TYPE_CHECKING:

View File

@@ -2,7 +2,7 @@ from copy import deepcopy
from typing import Any, Dict, List, Optional, Union
from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
def import_aim() -> Any:

View File

@@ -3,7 +3,7 @@ import warnings
from typing import Any, Dict, List, Optional, Union
from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
class ArgillaCallbackHandler(BaseCallbackHandler):

View File

@@ -5,7 +5,7 @@ from typing import Any, Dict, List, Optional, Union
import pandas as pd
from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
class ArizeCallbackHandler(BaseCallbackHandler):

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from typing import Any, Dict, List, Optional, Union
from uuid import UUID
-from langchain.schema import (
+from langchain.schema.base import (
AgentAction,
AgentFinish,
BaseMessage,

View File

@@ -13,7 +13,7 @@ from langchain.callbacks.utils import (
import_textstat,
load_json,
)
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
def import_clearml() -> Any:

View File

@@ -12,7 +12,7 @@ from langchain.callbacks.utils import (
import_spacy,
import_textstat,
)
-from langchain.schema import AgentAction, AgentFinish, Generation, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, Generation, LLMResult
LANGCHAIN_MODEL_NAME = "langchain-model"

View File

@@ -3,7 +3,7 @@ from typing import Any, Dict, Optional, TextIO, cast
from langchain.callbacks.base import BaseCallbackHandler
from langchain.input import print_text
-from langchain.schema import AgentAction, AgentFinish
+from langchain.schema.base import AgentAction, AgentFinish
class FileCallbackHandler(BaseCallbackHandler):

View File

@@ -36,7 +36,7 @@ from langchain.callbacks.tracers.langchain import LangChainTracer
from langchain.callbacks.tracers.langchain_v1 import LangChainTracerV1, TracerSessionV1
from langchain.callbacks.tracers.stdout import ConsoleCallbackHandler
from langchain.callbacks.tracers.wandb import WandbTracer
-from langchain.schema import (
+from langchain.schema.base import (
AgentAction,
AgentFinish,
BaseMessage,

View File

@@ -15,7 +15,7 @@ from langchain.callbacks.utils import (
import_spacy,
import_textstat,
)
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
from langchain.utils import get_from_dict_or_env

View File

@@ -2,7 +2,7 @@
from typing import Any, Dict, List, Optional, Union
from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
MODEL_COST_PER_1K_TOKENS = {
"gpt-4": 0.03,

View File

@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional, Union
from langchain.callbacks.base import BaseCallbackHandler
from langchain.input import print_text
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
class StdOutCallbackHandler(BaseCallbackHandler):

View File

@@ -4,7 +4,7 @@ import asyncio
from typing import Any, AsyncIterator, Dict, List, Literal, Union, cast
from langchain.callbacks.base import AsyncCallbackHandler
-from langchain.schema import LLMResult
+from langchain.schema.base import LLMResult
# TODO If used by two LLM runs in parallel this won't work as expected

View File

@@ -3,7 +3,7 @@ import sys
from typing import Any, Dict, List, Union
from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
class StreamingStdOutCallbackHandler(BaseCallbackHandler):

View File

@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional, Union
import streamlit as st
from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
class StreamlitCallbackHandler(BaseCallbackHandler):

View File

@@ -8,7 +8,7 @@ from uuid import UUID
from langchain.callbacks.base import BaseCallbackHandler
from langchain.callbacks.tracers.schemas import Run, RunTypeEnum
-from langchain.schema import LLMResult
+from langchain.schema.base import LLMResult
class TracerException(Exception):

View File

@@ -13,7 +13,7 @@ from langchainplus_sdk import LangChainPlusClient
from langchain.callbacks.tracers.base import BaseTracer
from langchain.callbacks.tracers.schemas import Run, RunTypeEnum, TracerSession
from langchain.env import get_runtime_environment
-from langchain.schema import BaseMessage, messages_to_dict
+from langchain.schema.base import BaseMessage, messages_to_dict
logger = logging.getLogger(__name__)

View File

@@ -16,7 +16,7 @@ from langchain.callbacks.tracers.schemas import (
TracerSessionV1,
TracerSessionV1Base,
)
-from langchain.schema import get_buffer_string
+from langchain.schema.base import get_buffer_string
from langchain.utils import raise_for_status_with_text

View File

@@ -9,7 +9,7 @@ from uuid import UUID
from pydantic import BaseModel, Field, root_validator
from langchain.env import get_runtime_environment
-from langchain.schema import LLMResult
+from langchain.schema.base import LLMResult
class TracerSessionV1Base(BaseModel):

View File

@@ -13,7 +13,7 @@ from langchain.callbacks.utils import (
import_spacy,
import_textstat,
)
-from langchain.schema import AgentAction, AgentFinish, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, LLMResult
def import_wandb() -> Any:

View File

@@ -4,7 +4,7 @@ import logging
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, Generation, LLMResult
+from langchain.schema.base import AgentAction, AgentFinish, Generation, LLMResult
from langchain.utils import get_from_env
if TYPE_CHECKING:

View File

@@ -8,7 +8,7 @@ from langchain.base_language import BaseLanguageModel
from langchain.chains.api.openapi.prompts import REQUEST_TEMPLATE
from langchain.chains.llm import LLMChain
from langchain.prompts.prompt import PromptTemplate
-from langchain.schema import BaseOutputParser
+from langchain.schema.base import BaseOutputParser
class APIRequesterOutputParser(BaseOutputParser):

View File

@@ -8,7 +8,7 @@ from langchain.base_language import BaseLanguageModel
from langchain.chains.api.openapi.prompts import RESPONSE_TEMPLATE
from langchain.chains.llm import LLMChain
from langchain.prompts.prompt import PromptTemplate
-from langchain.schema import BaseOutputParser
+from langchain.schema.base import BaseOutputParser
class APIResponderOutputParser(BaseOutputParser):

View File

@@ -18,7 +18,7 @@ from langchain.callbacks.manager import (
CallbackManagerForChainRun,
Callbacks,
)
-from langchain.schema import RUN_KEY, BaseMemory, RunInfo
+from langchain.schema.base import RUN_KEY, BaseMemory, RunInfo
def _get_verbosity() -> bool:

View File

@@ -7,7 +7,7 @@ from langchain.chains.conversation.prompt import PROMPT
from langchain.chains.llm import LLMChain
from langchain.memory.buffer import ConversationBufferMemory
from langchain.prompts.base import BasePromptTemplate
-from langchain.schema import BaseMemory
+from langchain.schema.base import BaseMemory
class ConversationChain(LLMChain):

View File

@@ -21,7 +21,7 @@ from langchain.chains.conversational_retrieval.prompts import CONDENSE_QUESTION_
from langchain.chains.llm import LLMChain
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts.base import BasePromptTemplate
-from langchain.schema import BaseMessage, BaseRetriever, Document
+from langchain.schema.base import BaseMessage, BaseRetriever, Document
from langchain.vectorstores.base import VectorStore
# Depending on the memory type and configuration, the chat history format may differ.

View File

@@ -20,7 +20,7 @@ from langchain.chains.flare.prompts import (
from langchain.chains.llm import LLMChain
from langchain.llms import OpenAI
from langchain.prompts import BasePromptTemplate
-from langchain.schema import BaseRetriever, Generation
+from langchain.schema.base import BaseRetriever, Generation
class _ResponseChain(LLMChain):

View File

@@ -1,7 +1,7 @@
from typing import Tuple
from langchain.prompts import PromptTemplate
-from langchain.schema import BaseOutputParser
+from langchain.schema.base import BaseOutputParser
class FinishedOutputParser(BaseOutputParser[Tuple[str, bool]]):

View File

@@ -17,7 +17,7 @@ from langchain.chains.base import Chain
from langchain.input import get_colored_text
from langchain.prompts.base import BasePromptTemplate
from langchain.prompts.prompt import PromptTemplate
-from langchain.schema import LLMResult, PromptValue
+from langchain.schema.base import LLMResult, PromptValue
class LLMChain(Chain):

View File

@@ -13,7 +13,7 @@ from langchain.chains.base import Chain
from langchain.chains.llm import LLMChain
from langchain.chains.llm_bash.prompt import PROMPT
from langchain.prompts.base import BasePromptTemplate
-from langchain.schema import OutputParserException
+from langchain.schema.base import OutputParserException
from langchain.utilities.bash import BashProcess
logger = logging.getLogger(__name__)

View File

@@ -5,7 +5,7 @@ import re
from typing import List
from langchain.prompts.prompt import PromptTemplate
-from langchain.schema import BaseOutputParser, OutputParserException
+from langchain.schema.base import BaseOutputParser, OutputParserException
_PROMPT_TEMPLATE = """If someone asks you to perform a task, your job is to come up with a series of bash commands that will perform the task. There is no need to put "#!/bin/bash" in your answer. Make sure to reason step by step, using this format:

View File

@@ -7,7 +7,7 @@ from pydantic import Field
from langchain.chains.combine_documents.stuff import StuffDocumentsChain
from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain
from langchain.docstore.document import Document
-from langchain.schema import BaseRetriever
+from langchain.schema.base import BaseRetriever
class RetrievalQAWithSourcesChain(BaseQAWithSourcesChain):

View File

@@ -23,7 +23,7 @@ from langchain.chains.query_constructor.prompt import (
)
from langchain.chains.query_constructor.schema import AttributeInfo
from langchain.output_parsers.json import parse_and_check_json_markdown
-from langchain.schema import BaseOutputParser, OutputParserException
+from langchain.schema.base import BaseOutputParser, OutputParserException
class StructuredQueryOutputParser(BaseOutputParser[StructuredQuery]):

View File

@@ -19,7 +19,7 @@ from langchain.chains.llm import LLMChain
from langchain.chains.question_answering import load_qa_chain
from langchain.chains.question_answering.stuff_prompt import PROMPT_SELECTOR
from langchain.prompts import PromptTemplate
-from langchain.schema import BaseRetriever, Document
+from langchain.schema.base import BaseRetriever, Document
from langchain.vectorstores.base import VectorStore

View File

@@ -14,7 +14,7 @@ from langchain.chains import LLMChain
from langchain.chains.router.base import RouterChain
from langchain.output_parsers.json import parse_and_check_json_markdown
from langchain.prompts import BasePromptTemplate
-from langchain.schema import BaseOutputParser, OutputParserException
+from langchain.schema.base import BaseOutputParser, OutputParserException
class LLMRouterChain(RouterChain):

View File

@@ -15,7 +15,7 @@ from langchain.chains.router.multi_retrieval_prompt import (
)
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
-from langchain.schema import BaseRetriever
+from langchain.schema.base import BaseRetriever
class MultiRetrievalQAChain(MultiRouteChain):

View File

@@ -8,7 +8,7 @@ from langchain.callbacks.manager import (
)
from langchain.chat_models.base import BaseChatModel
from langchain.llms.anthropic import _AnthropicCommon
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseMessage,
ChatGeneration,

View File

@@ -7,7 +7,7 @@ from typing import Any, Dict, Mapping
from pydantic import root_validator
from langchain.chat_models.openai import ChatOpenAI
-from langchain.schema import ChatResult
+from langchain.schema.base import ChatResult
from langchain.utils import get_from_dict_or_env
logger = logging.getLogger(__name__)

View File

@@ -17,7 +17,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
Callbacks,
)
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseMessage,
ChatGeneration,

View File

@@ -18,7 +18,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
)
from langchain.chat_models.base import BaseChatModel
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseMessage,
ChatGeneration,

View File

@@ -29,7 +29,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
)
from langchain.chat_models.base import BaseChatModel
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseMessage,
ChatGeneration,

View File

@@ -7,7 +7,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
)
from langchain.chat_models import ChatOpenAI
-from langchain.schema import BaseMessage, ChatResult
+from langchain.schema.base import BaseMessage, ChatResult
class PromptLayerChatOpenAI(ChatOpenAI):

View File

@@ -10,7 +10,7 @@ from langchain.callbacks.manager import (
)
from langchain.chat_models.base import BaseChatModel
from langchain.llms.vertexai import _VertexAICommon
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseMessage,
ChatGeneration,

View File

@@ -17,7 +17,7 @@ from langchain.callbacks.tracers.langchain import LangChainTracer
from langchain.chains.base import Chain
from langchain.chat_models.base import BaseChatModel
from langchain.llms.base import BaseLLM
-from langchain.schema import (
+from langchain.schema.base import (
BaseMessage,
ChatResult,
HumanMessage,

View File

@@ -1,7 +1,7 @@
from typing import Callable, Union
from langchain.docstore.base import Docstore
-from langchain.schema import Document
+from langchain.schema.base import Document
class DocstoreFn(Docstore):

View File

@@ -1,3 +1,3 @@
-from langchain.schema import Document
+from langchain.schema.base import Document
__all__ = ["Document"]

View File

@@ -3,7 +3,7 @@ from abc import ABC, abstractmethod
from typing import Iterator, List, Optional
from langchain.document_loaders.blob_loaders import Blob
-from langchain.schema import Document
+from langchain.schema.base import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter, TextSplitter

View File

@@ -6,7 +6,7 @@ from typing import Iterator, List, Literal, Optional, Sequence, Union
from langchain.document_loaders.base import BaseBlobParser, BaseLoader
from langchain.document_loaders.blob_loaders import BlobLoader, FileSystemBlobLoader
from langchain.document_loaders.parsers.registry import get_parser
-from langchain.schema import Document
+from langchain.schema.base import Document
from langchain.text_splitter import TextSplitter
_PathLike = Union[str, Path]

View File

@@ -4,7 +4,7 @@ from datetime import datetime
from typing import Iterator, List, Optional
from langchain.document_loaders.base import BaseLoader
-from langchain.schema import Document
+from langchain.schema.base import Document
from langchain.utils import get_from_env
LINK_NOTE_TEMPLATE = "joplin://x-callback-url/openNote?id={id}"

View File

@@ -2,7 +2,7 @@ from typing import Iterator
from langchain.document_loaders.base import BaseBlobParser
from langchain.document_loaders.blob_loaders import Blob
-from langchain.schema import Document
+from langchain.schema.base import Document
class OpenAIWhisperParser(BaseBlobParser):

View File

@@ -6,7 +6,7 @@ from typing import Iterator, Mapping, Optional
from langchain.document_loaders.base import BaseBlobParser
from langchain.document_loaders.blob_loaders.schema import Blob
-from langchain.schema import Document
+from langchain.schema.base import Document
class MimeTypeBasedParser(BaseBlobParser):

View File

@@ -3,7 +3,7 @@ from typing import Any, Iterator, Mapping, Optional
from langchain.document_loaders.base import BaseBlobParser
from langchain.document_loaders.blob_loaders import Blob
-from langchain.schema import Document
+from langchain.schema.base import Document
class PyPDFParser(BaseBlobParser):

View File

@@ -3,7 +3,7 @@ from typing import Iterator
from langchain.document_loaders.base import BaseBlobParser
from langchain.document_loaders.blob_loaders import Blob
-from langchain.schema import Document
+from langchain.schema.base import Document
class TextParser(BaseBlobParser):

View File

@@ -4,7 +4,7 @@ import re
from typing import Any, Callable, Generator, Iterable, List, Optional
from langchain.document_loaders.web_base import WebBaseLoader
-from langchain.schema import Document
+from langchain.schema.base import Document
def _default_parsing_function(content: Any) -> str:

View File

@@ -6,7 +6,7 @@ from pydantic import BaseModel, Field
from langchain.embeddings.base import Embeddings
from langchain.math_utils import cosine_similarity
-from langchain.schema import BaseDocumentTransformer, Document
+from langchain.schema.base import BaseDocumentTransformer, Document
class _DocumentWithState(Document):

View File

@@ -6,7 +6,7 @@ from langchain.chains.base import Chain
from langchain.chains.llm import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.evaluation.agents.trajectory_eval_prompt import EVAL_CHAT_PROMPT
-from langchain.schema import AgentAction, BaseOutputParser, OutputParserException
+from langchain.schema.base import AgentAction, BaseOutputParser, OutputParserException
from langchain.tools.base import BaseTool

View File

@@ -1,8 +1,8 @@
"""Prompt for trajectory evaluation chain."""
# flake8: noqa
-from langchain.schema import AIMessage
-from langchain.schema import HumanMessage
-from langchain.schema import SystemMessage
+from langchain.schema.base import AIMessage
+from langchain.schema.base import HumanMessage
+from langchain.schema.base import SystemMessage
from langchain.prompts.chat import (
ChatPromptTemplate,

View File

@@ -12,7 +12,7 @@ from langchain.callbacks.manager import (
)
from langchain.chains.base import Chain
from langchain.chains.llm import LLMChain
-from langchain.schema import RUN_KEY, BaseOutputParser
+from langchain.schema.base import RUN_KEY, BaseOutputParser
class RunEvaluatorInputMapper:

View File

@@ -14,7 +14,7 @@ from langchain.experimental.autonomous_agents.autogpt.prompt import AutoGPTPromp
from langchain.experimental.autonomous_agents.autogpt.prompt_generator import (
FINISH_NAME,
)
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseMessage,
Document,

View File

@@ -3,7 +3,7 @@ import re
from abc import abstractmethod
from typing import Dict, NamedTuple
-from langchain.schema import BaseOutputParser
+from langchain.schema.base import BaseOutputParser
class AutoGPTAction(NamedTuple):

View File

@@ -7,7 +7,7 @@ from langchain.experimental.autonomous_agents.autogpt.prompt_generator import ge
from langchain.prompts.chat import (
BaseChatPromptTemplate,
)
-from langchain.schema import BaseMessage, HumanMessage, SystemMessage
+from langchain.schema.base import BaseMessage, HumanMessage, SystemMessage
from langchain.tools.base import BaseTool
from langchain.vectorstores.base import VectorStoreRetriever

View File

@@ -7,7 +7,7 @@ from langchain import LLMChain
from langchain.base_language import BaseLanguageModel
from langchain.prompts import PromptTemplate
from langchain.retrievers import TimeWeightedVectorStoreRetriever
-from langchain.schema import BaseMemory, Document
+from langchain.schema.base import BaseMemory, Document
from langchain.utils import mock_now
logger = logging.getLogger(__name__)

View File

@@ -9,7 +9,7 @@ from langchain.experimental.plan_and_execute.schema import (
Step,
)
from langchain.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
-from langchain.schema import SystemMessage
+from langchain.schema.base import SystemMessage
SYSTEM_PROMPT = (
"Let's first understand the problem and devise a plan to solve the problem."

View File

@@ -3,7 +3,7 @@ from typing import List, Tuple
from pydantic import BaseModel, Field
-from langchain.schema import BaseOutputParser
+from langchain.schema.base import BaseOutputParser
class Step(BaseModel):

View File

@@ -9,7 +9,7 @@ from langchain.document_loaders.base import BaseLoader
from langchain.embeddings.base import Embeddings
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms.openai import OpenAI
-from langchain.schema import Document
+from langchain.schema.base import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter, TextSplitter
from langchain.vectorstores.base import VectorStore
from langchain.vectorstores.chroma import Chroma

View File

@@ -19,7 +19,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
Callbacks,
)
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseMessage,
Generation,

View File

@@ -18,7 +18,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
)
from langchain.llms import BaseLLM
-from langchain.schema import Generation, LLMResult
+from langchain.schema.base import Generation, LLMResult
from langchain.utils import get_from_dict_or_env
logger = logging.getLogger(__name__)

View File

@@ -34,7 +34,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
)
from langchain.llms.base import BaseLLM
-from langchain.schema import Generation, LLMResult
+from langchain.schema.base import Generation, LLMResult
from langchain.utils import get_from_dict_or_env
logger = logging.getLogger(__name__)

View File

@@ -7,7 +7,7 @@ from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
)
from langchain.llms import OpenAI, OpenAIChat
-from langchain.schema import LLMResult
+from langchain.schema.base import LLMResult
class PromptLayerOpenAI(OpenAI):

View File

@@ -4,7 +4,7 @@ from pydantic import root_validator
from langchain.memory.chat_memory import BaseChatMemory, BaseMemory
from langchain.memory.utils import get_prompt_input_key
-from langchain.schema import get_buffer_string
+from langchain.schema.base import get_buffer_string
class ConversationBufferMemory(BaseChatMemory):

View File

@@ -1,7 +1,7 @@
from typing import Any, Dict, List
from langchain.memory.chat_memory import BaseChatMemory
-from langchain.schema import BaseMessage, get_buffer_string
+from langchain.schema.base import BaseMessage, get_buffer_string
class ConversationBufferWindowMemory(BaseChatMemory):

View File

@@ -5,7 +5,7 @@ from pydantic import Field
from langchain.memory.chat_message_histories.in_memory import ChatMessageHistory
from langchain.memory.utils import get_prompt_input_key
-from langchain.schema import BaseChatMessageHistory, BaseMemory
+from langchain.schema.base import BaseChatMessageHistory, BaseMemory
class BaseChatMemory(BaseMemory, ABC):

View File

@@ -2,10 +2,10 @@ import json
import logging
from typing import List
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
-_message_to_dict,
+message_to_dict,
messages_from_dict,
)
@@ -153,7 +153,7 @@ class CassandraChatMessageHistory(BaseChatMessageHistory):
self.session.execute(
"""INSERT INTO message_store
(id, session_id, history) VALUES (%s, %s, %s);""",
-(uuid.uuid4(), self.session_id, json.dumps(_message_to_dict(message))),
+(uuid.uuid4(), self.session_id, json.dumps(message_to_dict(message))),
)
except (Unavailable, WriteTimeout, WriteFailure) as error:
logger.error("Unable to write chat history messages to cassandra")
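Alongside the import move, the chat-message-history backends in this part of the diff switch from the underscore-prefixed _message_to_dict to the public message_to_dict when serializing messages for storage. A small round-trip sketch of that helper pair (the module path again assumes a checkout of this branch; the function names match what the diff shows):

import json

from langchain.schema.base import HumanMessage, message_to_dict, messages_from_dict

# Serialize one message to a JSON string, as the Cassandra/DynamoDB/Redis
# histories here do before writing it to their store.
payload = json.dumps(message_to_dict(HumanMessage(content="hello")))

# Read it back: messages_from_dict expects a list of these dicts.
restored = messages_from_dict([json.loads(payload)])
print(restored[0].content)  # "hello"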

View File

@@ -5,7 +5,7 @@ import logging
from types import TracebackType
from typing import TYPE_CHECKING, Any, List, Optional, Type
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
messages_from_dict,

View File

@@ -1,10 +1,10 @@
import logging
from typing import List, Optional
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
-_message_to_dict,
+message_to_dict,
messages_from_dict,
messages_to_dict,
)
@@ -65,7 +65,7 @@ class DynamoDBChatMessageHistory(BaseChatMessageHistory):
from botocore.exceptions import ClientError
messages = messages_to_dict(self.messages)
-_message = _message_to_dict(message)
+_message = message_to_dict(message)
messages.append(_message)
try:

View File

@@ -3,7 +3,7 @@ import logging
from pathlib import Path
from typing import List
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
messages_from_dict,

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
import logging
from typing import TYPE_CHECKING, List, Optional
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
messages_from_dict,

View File

@@ -2,7 +2,7 @@ from typing import List
from pydantic import BaseModel
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
)

View File

@@ -4,10 +4,10 @@ import json
from datetime import timedelta
from typing import TYPE_CHECKING, Any, Optional
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
-_message_to_dict,
+message_to_dict,
messages_from_dict,
)
from langchain.utils import get_from_env
@@ -153,7 +153,7 @@ class MomentoChatMessageHistory(BaseChatMessageHistory):
"""
from momento.responses import CacheListPushBack
-item = json.dumps(_message_to_dict(message))
+item = json.dumps(message_to_dict(message))
push_response = self.cache_client.list_push_back(
self.cache_name, self.key, item, ttl=self.ttl
)

View File

@@ -2,10 +2,10 @@ import json
import logging
from typing import List
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
-_message_to_dict,
+message_to_dict,
messages_from_dict,
)
@@ -75,7 +75,7 @@ class MongoDBChatMessageHistory(BaseChatMessageHistory):
self.collection.insert_one(
{
"SessionId": self.session_id,
-"History": json.dumps(_message_to_dict(message)),
+"History": json.dumps(message_to_dict(message)),
}
)
except errors.WriteError as err:

View File

@@ -2,10 +2,10 @@ import json
import logging
from typing import List
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
-_message_to_dict,
+message_to_dict,
messages_from_dict,
)
@@ -61,7 +61,7 @@ class PostgresChatMessageHistory(BaseChatMessageHistory):
sql.Identifier(self.table_name)
)
self.cursor.execute(
-query, (self.session_id, json.dumps(_message_to_dict(message)))
+query, (self.session_id, json.dumps(message_to_dict(message)))
)
self.connection.commit()

View File

@@ -2,10 +2,10 @@ import json
import logging
from typing import List, Optional
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
-_message_to_dict,
+message_to_dict,
messages_from_dict,
)
@@ -52,7 +52,7 @@ class RedisChatMessageHistory(BaseChatMessageHistory):
def add_message(self, message: BaseMessage) -> None:
"""Append the message to the record in Redis"""
-self.redis_client.lpush(self.key, json.dumps(_message_to_dict(message)))
+self.redis_client.lpush(self.key, json.dumps(message_to_dict(message)))
if self.ttl:
self.redis_client.expire(self.key, self.ttl)

View File

@@ -10,10 +10,10 @@ except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
-from langchain.schema import (
+from langchain.schema.base import (
BaseChatMessageHistory,
BaseMessage,
-_message_to_dict,
+message_to_dict,
messages_from_dict,
)
@@ -66,7 +66,7 @@ class SQLChatMessageHistory(BaseChatMessageHistory):
def add_message(self, message: BaseMessage) -> None:
"""Append the message to the record in db"""
with self.Session() as session:
-jsonstr = json.dumps(_message_to_dict(message))
+jsonstr = json.dumps(message_to_dict(message))
session.add(self.Message(session_id=self.session_id, message=jsonstr))
session.commit()

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Dict, List, Optional
-from langchain.schema import (
+from langchain.schema.base import (
AIMessage,
BaseChatMessageHistory,
BaseMessage,

View File

@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Set
from pydantic import validator
from langchain.memory.chat_memory import BaseChatMemory
-from langchain.schema import BaseMemory
+from langchain.schema.base import BaseMemory
class CombinedMemory(BaseMemory):

View File

@@ -14,7 +14,7 @@ from langchain.memory.prompt import (
)
from langchain.memory.utils import get_prompt_input_key
from langchain.prompts.base import BasePromptTemplate
-from langchain.schema import BaseMessage, get_buffer_string
+from langchain.schema.base import BaseMessage, get_buffer_string
logger = logging.getLogger(__name__)

View File

@@ -13,7 +13,7 @@ from langchain.memory.prompt import (
)
from langchain.memory.utils import get_prompt_input_key
from langchain.prompts.base import BasePromptTemplate
-from langchain.schema import (
+from langchain.schema.base import (
BaseMessage,
SystemMessage,
get_buffer_string,

Some files were not shown because too many files have changed in this diff.