Merge branch 'master' into fix/rag-tutorial-part-1

Yasien Dwieb 2025-07-19 12:36:57 +03:00 committed by GitHub
commit ebb1024966
4 changed files with 32 additions and 1 deletion


@@ -36,6 +36,8 @@ class FakeMessagesListChatModel(BaseChatModel):
         run_manager: Optional[CallbackManagerForLLMRun] = None,
         **kwargs: Any,
     ) -> ChatResult:
+        if self.sleep is not None:
+            time.sleep(self.sleep)
         response = self.responses[self.i]
         if self.i < len(self.responses) - 1:
             self.i += 1
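For context, a minimal usage sketch of the behaviour this hunk adds, assuming FakeMessagesListChatModel already declares the sleep field that the new lines read (the model and message contents below are illustrative, not part of the diff):

from langchain_core.language_models import FakeMessagesListChatModel
from langchain_core.messages import AIMessage, HumanMessage

# With sleep set, each invoke() pauses before returning the next canned
# response, which lets tests simulate model latency deterministically.
model = FakeMessagesListChatModel(
    responses=[AIMessage(content="first"), AIMessage(content="second")],
    sleep=0.05,
)
reply = model.invoke([HumanMessage(content="hello")])
print(reply.content)  # "first", returned after roughly a 0.05 s pause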


@@ -150,6 +150,11 @@ def parse_tag(template: str, l_del: str, r_del: str) -> tuple[tuple[str, str], str]:
         msg = f"unclosed tag at line {_CURRENT_LINE}"
         raise ChevronError(msg) from e
+    # Check for empty tags
+    if not tag.strip():
+        msg = f"empty tag at line {_CURRENT_LINE}"
+        raise ChevronError(msg)
     # Find the type meaning of the first character
     tag_type = tag_types.get(tag[0], "variable")
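A rough sketch of the effect of the new guard, assuming the vendored mustache module exposes a render() helper (only ChevronError and the "empty tag" message come from this diff; the rest is illustrative):

import pytest
from langchain_core.utils import mustache
from langchain_core.utils.mustache import ChevronError

# Before this check, an empty tag ("{{}}") would fall through to tag[0]
# and typically fail with an opaque IndexError; now parsing fails fast
# with a descriptive ChevronError that names the offending line.
with pytest.raises(ChevronError, match="empty tag"):
    mustache.render("hi {{}}", {"name": "world"})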


@@ -1,5 +1,6 @@
 """Tests for verifying that testing utility code works as expected."""
+import time
 from itertools import cycle
 from typing import Any, Optional, Union
 from uuid import UUID
@@ -9,10 +10,11 @@ from typing_extensions import override
 from langchain_core.callbacks.base import AsyncCallbackHandler
 from langchain_core.language_models import (
     FakeListChatModel,
+    FakeMessagesListChatModel,
     GenericFakeChatModel,
     ParrotFakeChatModel,
 )
-from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage
+from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage, HumanMessage
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
 from tests.unit_tests.stubs import (
     _any_id_ai_message,
@@ -230,3 +232,18 @@ def test_fake_list_chat_model_batch() -> None:
     fake = FakeListChatModel(responses=["a", "b", "c"])
     resp = fake.batch(["1", "2", "3"])
     assert resp == expected
+
+
+def test_fake_messages_list_chat_model_sleep_delay() -> None:
+    sleep_time = 0.1
+    model = FakeMessagesListChatModel(
+        responses=[AIMessage(content="A"), AIMessage(content="B")],
+        sleep=sleep_time,
+    )
+    messages = [HumanMessage(content="C")]
+    start = time.time()
+    model.invoke(messages)
+    elapsed = time.time() - start
+    assert elapsed >= sleep_time
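As a follow-up sketch, and assuming FakeMessagesListChatModel does not override _agenerate, the default async path runs the synchronous _generate in a worker thread, so the same delay should also be observable through ainvoke():

import asyncio
import time

from langchain_core.language_models import FakeMessagesListChatModel
from langchain_core.messages import AIMessage, HumanMessage


async def main() -> None:
    model = FakeMessagesListChatModel(
        responses=[AIMessage(content="A")],
        sleep=0.1,
    )
    start = time.time()
    # ainvoke() delegates to the sync _generate in a worker thread (assuming
    # no _agenerate override), so the time.sleep() added above still applies.
    await model.ainvoke([HumanMessage(content="C")])
    assert time.time() - start >= 0.1


asyncio.run(main())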


@@ -2,6 +2,7 @@ from functools import partial
 from inspect import isclass
 from typing import Any, Union, cast
+import pytest
 from pydantic import BaseModel
 from langchain_core.language_models import FakeListChatModel
@@ -10,6 +11,7 @@ from langchain_core.load.load import loads
 from langchain_core.messages import HumanMessage
 from langchain_core.prompts.structured import StructuredPrompt
 from langchain_core.runnables.base import Runnable, RunnableLambda
+from langchain_core.utils.mustache import ChevronError
 from langchain_core.utils.pydantic import is_basemodel_subclass
@@ -128,3 +130,8 @@ def test_structured_prompt_template_format() -> None:
     assert prompt.invoke({"person": {"name": "foo"}}).to_messages() == [
         HumanMessage("hi foo")
     ]
+
+
+def test_structured_prompt_template_empty_vars() -> None:
+    with pytest.raises(ChevronError, match="empty tag"):
+        StructuredPrompt([("human", "hi {{}}")], schema={}, template_format="mustache")