Harrison/split schema dir (#7025)

Should be no functional changes.

Also keep __init__ exposing most of the old names for backwards compatibility.

---------

Co-authored-by: Dev 2049 <dev.dev2049@gmail.com>
Co-authored-by: Bagatur <baskaryan@gmail.com>
Harrison Chase
2023-07-01 10:39:19 -07:00
committed by GitHub
parent 556c425042
commit 3bfe7cf467
86 changed files with 1095 additions and 1028 deletions
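
The "keep __init__ exposing" note above boils down to a re-export layer: the message classes move to langchain.schema.messages, while langchain/schema/__init__.py keeps exposing them under the old path so existing imports do not break. The snippet below is a hypothetical sketch of that pattern, not the actual __init__.py from this commit; the names are the ones that appear in the diffs that follow.

# Hypothetical sketch of langchain/schema/__init__.py after the split.
# The message types now live in langchain.schema.messages; re-exporting
# them here keeps old `from langchain.schema import HumanMessage` callers working.
from langchain.schema.messages import (
    AIMessage,
    BaseMessage,
    FunctionMessage,
    HumanMessage,
    SystemMessage,
)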

@@ -6,12 +6,10 @@ import pytest
 from langchain.callbacks.manager import CallbackManager
 from langchain.chat_models.anthropic import ChatAnthropic
 from langchain.schema import (
-    AIMessage,
-    BaseMessage,
     ChatGeneration,
-    HumanMessage,
     LLMResult,
 )
+from langchain.schema.messages import AIMessage, BaseMessage, HumanMessage
 from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
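
In the hunk above, only the message classes (AIMessage, BaseMessage, HumanMessage) move to langchain.schema.messages; non-message types such as ChatGeneration and LLMResult stay in langchain.schema. With the backwards-compat re-exports sketched earlier, both import paths should resolve to the same objects. A minimal, hypothetical sanity check (assuming an environment with langchain at or after this commit):

# The legacy and the new import path should yield the very same class
# if langchain.schema.__init__ re-exports from langchain.schema.messages.
from langchain.schema import HumanMessage as LegacyHumanMessage
from langchain.schema.messages import HumanMessage

assert LegacyHumanMessage is HumanMessage
print("backwards-compatible re-export in place; callers are unaffected")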

@@ -8,13 +8,11 @@ import pytest
 from langchain.chat_models import ChatGooglePalm
 from langchain.schema import (
-    BaseMessage,
     ChatGeneration,
     ChatResult,
-    HumanMessage,
     LLMResult,
-    SystemMessage,
 )
+from langchain.schema.messages import BaseMessage, HumanMessage, SystemMessage
 def test_chat_google_palm() -> None:

@@ -6,13 +6,11 @@ import pytest
 from langchain.callbacks.manager import CallbackManager
 from langchain.chat_models.openai import ChatOpenAI
 from langchain.schema import (
-    BaseMessage,
     ChatGeneration,
     ChatResult,
-    HumanMessage,
     LLMResult,
-    SystemMessage,
 )
+from langchain.schema.messages import BaseMessage, HumanMessage, SystemMessage
 from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler

@@ -5,13 +5,11 @@ import pytest
 from langchain.callbacks.manager import CallbackManager
 from langchain.chat_models.promptlayer_openai import PromptLayerChatOpenAI
 from langchain.schema import (
-    BaseMessage,
     ChatGeneration,
     ChatResult,
-    HumanMessage,
     LLMResult,
-    SystemMessage,
 )
+from langchain.schema.messages import BaseMessage, HumanMessage, SystemMessage
 from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler

@@ -13,11 +13,7 @@ import pytest
 from langchain.chat_models import ChatVertexAI
 from langchain.chat_models.vertexai import _MessagePair, _parse_chat_history
-from langchain.schema import (
-    AIMessage,
-    HumanMessage,
-    SystemMessage,
-)
+from langchain.schema.messages import AIMessage, HumanMessage, SystemMessage
 def test_vertexai_single_call() -> None:

@@ -8,10 +8,7 @@ from langchain.memory import ConversationBufferMemory
 from langchain.memory.chat_message_histories.cassandra import (
     CassandraChatMessageHistory,
 )
-from langchain.schema import (
-    AIMessage,
-    HumanMessage,
-)
+from langchain.schema.messages import AIMessage, HumanMessage
 def _chat_message_history(

@@ -3,7 +3,7 @@ import os
 from langchain.memory import ConversationBufferMemory
 from langchain.memory.chat_message_histories import CosmosDBChatMessageHistory
-from langchain.schema import _message_to_dict
+from langchain.schema.messages import _message_to_dict
 # Replace these with your Azure Cosmos DB endpoint and key
 endpoint = os.environ["COSMOS_DB_ENDPOINT"]

@@ -2,7 +2,7 @@ import json
 from langchain.memory import ConversationBufferMemory
 from langchain.memory.chat_message_histories import FirestoreChatMessageHistory
-from langchain.schema import _message_to_dict
+from langchain.schema.messages import _message_to_dict
 def test_memory_with_message_store() -> None:

@@ -14,7 +14,7 @@ from momento import CacheClient, Configurations, CredentialProvider
 from langchain.memory import ConversationBufferMemory
 from langchain.memory.chat_message_histories import MomentoChatMessageHistory
-from langchain.schema import _message_to_dict
+from langchain.schema.messages import _message_to_dict
 def random_string() -> str:

@@ -3,7 +3,7 @@ import os
 from langchain.memory import ConversationBufferMemory
 from langchain.memory.chat_message_histories import MongoDBChatMessageHistory
-from langchain.schema import _message_to_dict
+from langchain.schema.messages import _message_to_dict
 # Replace these with your mongodb connection string
 connection_string = os.environ["MONGODB_CONNECTION_STRING"]

@@ -2,7 +2,7 @@ import json
 from langchain.memory import ConversationBufferMemory
 from langchain.memory.chat_message_histories import RedisChatMessageHistory
-from langchain.schema import _message_to_dict
+from langchain.schema.messages import _message_to_dict
 def test_memory_with_message_store() -> None:

@@ -6,7 +6,7 @@ from uuid import UUID
 from pydantic import BaseModel
 from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
-from langchain.schema import BaseMessage
+from langchain.schema.messages import BaseMessage
 class BaseFakeCallbackHandler(BaseModel):

@@ -11,7 +11,8 @@ from freezegun import freeze_time
 from langchain.callbacks.manager import CallbackManager
 from langchain.callbacks.tracers.base import BaseTracer, TracerException
 from langchain.callbacks.tracers.schemas import Run
-from langchain.schema import HumanMessage, LLMResult
+from langchain.schema import LLMResult
+from langchain.schema.messages import HumanMessage
 SERIALIZED = {"id": ["llm"]}
 SERIALIZED_CHAT = {"id": ["chat_model"]}

@@ -18,7 +18,8 @@ from langchain.callbacks.tracers.langchain_v1 import (
     TracerSessionV1,
 )
 from langchain.callbacks.tracers.schemas import Run, RunTypeEnum, TracerSessionV1Base
-from langchain.schema import HumanMessage, LLMResult
+from langchain.schema import LLMResult
+from langchain.schema.messages import HumanMessage
 TEST_SESSION_ID = 2023

@@ -7,11 +7,7 @@ from langchain.chat_models.google_palm import (
     ChatGooglePalmError,
     _messages_to_prompt_dict,
 )
-from langchain.schema import (
-    AIMessage,
-    HumanMessage,
-    SystemMessage,
-)
+from langchain.schema.messages import AIMessage, HumanMessage, SystemMessage
 def test_messages_to_prompt_dict_with_valid_messages() -> None:

@@ -5,9 +5,7 @@ import json
 from langchain.chat_models.openai import (
     _convert_dict_to_message,
 )
-from langchain.schema import (
-    FunctionMessage,
-)
+from langchain.schema.messages import FunctionMessage
 def test_function_message_dict_to_function_message() -> None:

@@ -6,7 +6,8 @@ from langchain.callbacks.manager import (
     CallbackManagerForLLMRun,
 )
 from langchain.chat_models.base import SimpleChatModel
-from langchain.schema import AIMessage, BaseMessage, ChatGeneration, ChatResult
+from langchain.schema import ChatGeneration, ChatResult
+from langchain.schema.messages import AIMessage, BaseMessage
 class FakeChatModel(SimpleChatModel):

@@ -1,7 +1,7 @@
 """Test LLM callbacks."""
 from langchain.chat_models.fake import FakeListChatModel
 from langchain.llms.fake import FakeListLLM
-from langchain.schema import HumanMessage
+from langchain.schema.messages import HumanMessage
 from tests.unit_tests.callbacks.fake_callback_handler import (
     FakeCallbackHandler,
     FakeCallbackHandlerWithChatStart,

@@ -5,7 +5,7 @@ from typing import Generator
 import pytest
 from langchain.memory.chat_message_histories import FileChatMessageHistory
-from langchain.schema import AIMessage, HumanMessage
+from langchain.schema.messages import AIMessage, HumanMessage
 @pytest.fixture

@@ -4,7 +4,7 @@ from typing import Tuple
 import pytest
 from langchain.memory.chat_message_histories import SQLChatMessageHistory
-from langchain.schema import AIMessage, HumanMessage
+from langchain.schema.messages import AIMessage, HumanMessage
 # @pytest.fixture(params=[("SQLite"), ("postgresql")])

@@ -4,7 +4,7 @@ import pytest
 from pytest_mock import MockerFixture
 from langchain.memory.chat_message_histories import ZepChatMessageHistory
-from langchain.schema import AIMessage, HumanMessage
+from langchain.schema.messages import AIMessage, HumanMessage
 if TYPE_CHECKING:
     from zep_python import ZepClient

@@ -13,7 +13,7 @@ from langchain.prompts.chat import (
     HumanMessagePromptTemplate,
     SystemMessagePromptTemplate,
 )
-from langchain.schema import HumanMessage
+from langchain.schema.messages import HumanMessage
 def create_messages() -> List[BaseMessagePromptTemplate]:

@@ -16,12 +16,10 @@ from langchain.chat_models.base import BaseChatModel, dumps
 from langchain.llms import FakeListLLM
 from langchain.llms.base import BaseLLM
 from langchain.schema import (
-    AIMessage,
-    BaseMessage,
     ChatGeneration,
     Generation,
-    HumanMessage,
 )
+from langchain.schema.messages import AIMessage, BaseMessage, HumanMessage
 def get_sqlite_cache() -> SQLAlchemyCache:

@@ -2,7 +2,7 @@
 import unittest
-from langchain.schema import (
+from langchain.schema.messages import (
     AIMessage,
     HumanMessage,
     SystemMessage,