From 6d71bb83de71e74f5bb77383c1fed121bd72b1b5 Mon Sep 17 00:00:00 2001
From: Yoshi <70424721+yoshihyoda@users.noreply.github.com>
Date: Sat, 19 Jul 2025 14:30:15 -0700
Subject: [PATCH] fix(core): fix docstrings and add sleep to
 FakeListChatModel._call (#32108)

---
 .../core/langchain_core/language_models/fake_chat_models.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/libs/core/langchain_core/language_models/fake_chat_models.py b/libs/core/langchain_core/language_models/fake_chat_models.py
index b8dd7325242..184a4fcb154 100644
--- a/libs/core/langchain_core/language_models/fake_chat_models.py
+++ b/libs/core/langchain_core/language_models/fake_chat_models.py
@@ -63,9 +63,9 @@ class FakeListChatModel(SimpleChatModel):
     """List of responses to **cycle** through in order."""
     sleep: Optional[float] = None
     i: int = 0
-    """List of responses to **cycle** through in order."""
-    error_on_chunk_number: Optional[int] = None
     """Internally incremented after every model invocation."""
+    error_on_chunk_number: Optional[int] = None
+    """If set, raise an error on the specified chunk number during streaming."""
 
     @property
     @override
@@ -81,6 +81,8 @@ class FakeListChatModel(SimpleChatModel):
         **kwargs: Any,
     ) -> str:
         """First try to lookup in queries, else return 'foo' or 'bar'."""
+        if self.sleep is not None:
+            time.sleep(self.sleep)
         response = self.responses[self.i]
         if self.i < len(self.responses) - 1:
             self.i += 1
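
A minimal usage sketch of the behavior this patch enables, not part of the patch itself: with sleep set, each invoke call on FakeListChatModel now pauses inside _call, matching what the streaming path already did. The sketch assumes FakeListChatModel is importable from langchain_core.language_models; the variable names and the 0.1 second value are illustrative only.

import time

from langchain_core.language_models import FakeListChatModel

# Two canned responses, cycled in order; sleep=0.1 makes each synchronous
# call pause, which is the behavior this patch adds to _call.
model = FakeListChatModel(responses=["first", "second"], sleep=0.1)

start = time.monotonic()
print(model.invoke("hi").content)  # "first"
print(model.invoke("hi").content)  # "second"
print(f"elapsed: {time.monotonic() - start:.2f}s")  # roughly 0.2s or more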