diff --git a/libs/langchain/langchain/schema/runnable.py b/libs/langchain/langchain/schema/runnable.py
index 0ffb41c7cf6..19a3be143fb 100644
--- a/libs/langchain/langchain/schema/runnable.py
+++ b/libs/langchain/langchain/schema/runnable.py
@@ -131,6 +131,12 @@ class Runnable(Generic[Input, Output], ABC):
     ) -> AsyncIterator[Output]:
         yield await self.ainvoke(input, config)
 
+    def bind(self, **kwargs: Any) -> Runnable[Input, Output]:
+        """
+        Bind arguments to a Runnable, returning a new Runnable.
+        """
+        return RunnableBinding(bound=self, kwargs=kwargs)
+
     def _get_config_list(
         self, config: Optional[Union[RunnableConfig, List[RunnableConfig]]], length: int
     ) -> List[RunnableConfig]:
@@ -692,6 +698,60 @@ class RunnablePassthrough(Serializable, Runnable[Input, Input]):
         return self._call_with_config(lambda x: x, input, config)
 
 
+class RunnableBinding(Serializable, Runnable[Input, Output]):
+    bound: Runnable[Input, Output]
+
+    kwargs: Mapping[str, Any]
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    @property
+    def lc_serializable(self) -> bool:
+        return True
+
+    def invoke(self, input: Input, config: Optional[RunnableConfig] = None) -> Output:
+        return self.bound.invoke(input, config, **self.kwargs)
+
+    async def ainvoke(
+        self, input: Input, config: Optional[RunnableConfig] = None
+    ) -> Output:
+        return await self.bound.ainvoke(input, config, **self.kwargs)
+
+    def batch(
+        self,
+        inputs: List[Input],
+        config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None,
+        *,
+        max_concurrency: Optional[int] = None,
+    ) -> List[Output]:
+        return self.bound.batch(
+            inputs, config, max_concurrency=max_concurrency, **self.kwargs
+        )
+
+    async def abatch(
+        self,
+        inputs: List[Input],
+        config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None,
+        *,
+        max_concurrency: Optional[int] = None,
+    ) -> List[Output]:
+        return await self.bound.abatch(
+            inputs, config, max_concurrency=max_concurrency, **self.kwargs
+        )
+
+    def stream(
+        self, input: Input, config: Optional[RunnableConfig] = None
+    ) -> Iterator[Output]:
+        yield from self.bound.stream(input, config, **self.kwargs)
+
+    async def astream(
+        self, input: Input, config: Optional[RunnableConfig] = None
+    ) -> AsyncIterator[Output]:
+        async for item in self.bound.astream(input, config, **self.kwargs):
+            yield item
+
+
 def _patch_config(
     config: RunnableConfig, callback_manager: BaseCallbackManager
 ) -> RunnableConfig:
diff --git a/libs/langchain/tests/unit_tests/schema/__snapshots__/test_runnable.ambr b/libs/langchain/tests/unit_tests/schema/__snapshots__/test_runnable.ambr
index 271b79c1065..ec985ee9d6e 100644
--- a/libs/langchain/tests/unit_tests/schema/__snapshots__/test_runnable.ambr
+++ b/libs/langchain/tests/unit_tests/schema/__snapshots__/test_runnable.ambr
@@ -773,13 +773,30 @@
           "steps": {
             "chat": {
               "lc": 1,
-              "type": "not_implemented",
+              "type": "constructor",
               "id": [
                 "langchain",
-                "chat_models",
-                "fake",
-                "FakeListChatModel"
-              ]
+                "schema",
+                "runnable",
+                "RunnableBinding"
+              ],
+              "kwargs": {
+                "bound": {
+                  "lc": 1,
+                  "type": "not_implemented",
+                  "id": [
+                    "langchain",
+                    "chat_models",
+                    "fake",
+                    "FakeListChatModel"
+                  ]
+                },
+                "kwargs": {
+                  "stop": [
+                    "Thought:"
+                  ]
+                }
+              }
             },
             "llm": {
               "lc": 1,
@@ -810,6 +827,6 @@
 # ---
 # name: test_seq_prompt_map.1
   list([
-    Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': 
['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string'}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string'}}}}]}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableMap'], 'kwargs': {'steps': {'chat': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'chat_models', 'fake', 'FakeListChatModel']}, 'llm': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'llms', 'fake', 'FakeListLLM']}, 'passthrough': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}}}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'chat': AIMessage(content="i'm a chatbot", additional_kwargs={}, example=False), 'llm': "i'm a textbot", 'passthrough': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=1, child_execution_order=7, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string'}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string'}}}}]}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=[], execution_order=2, child_execution_order=2, child_runs=[]), 
Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='RunnableLambda', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, outputs={'output': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=[], execution_order=3, child_execution_order=3, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='RunnableMap', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableMap'], 'kwargs': {'steps': {'chat': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'chat_models', 'fake', 'FakeListChatModel']}, 'llm': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'llms', 'fake', 'FakeListLLM']}, 'passthrough': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, outputs={'chat': AIMessage(content="i'm a chatbot", additional_kwargs={}, example=False), 'llm': "i'm a textbot", 'passthrough': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=[], execution_order=4, child_execution_order=7, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ["i'm a chatbot"], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'chat_models', 'fake', 'FakeListChatModel']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': "i'm a chatbot", 'generation_info': None, 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': "i'm a chatbot"}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=[], execution_order=5, child_execution_order=5, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='FakeListLLM', 
start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ["i'm a textbot"], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'llms', 'fake', 'FakeListLLM']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': "i'm a textbot", 'generation_info': None}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=[], execution_order=6, child_execution_order=6, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='RunnableLambda', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, outputs={'output': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=[], execution_order=7, child_execution_order=7, child_runs=[])])]),
+    Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string'}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string'}}}}]}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableMap'], 'kwargs': {'steps': {'chat': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableBinding'], 'kwargs': {'bound': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'chat_models', 'fake', 'FakeListChatModel']}, 'kwargs': {'stop': ['Thought:']}}}, 'llm': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'llms', 'fake', 'FakeListLLM']}, 'passthrough': {'lc': 1, 'type': 'not_implemented', 'id': 
['langchain', 'schema', 'runnable', 'RunnableLambda']}}}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'chat': AIMessage(content="i'm a chatbot", additional_kwargs={}, example=False), 'llm': "i'm a textbot", 'passthrough': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=1, child_execution_order=7, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string'}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string'}}}}]}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=[], execution_order=2, child_execution_order=2, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='RunnableLambda', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, outputs={'output': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=[], execution_order=3, child_execution_order=3, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='RunnableMap', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableMap'], 'kwargs': {'steps': {'chat': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 
'RunnableBinding'], 'kwargs': {'bound': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'chat_models', 'fake', 'FakeListChatModel']}, 'kwargs': {'stop': ['Thought:']}}}, 'llm': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'llms', 'fake', 'FakeListLLM']}, 'passthrough': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, outputs={'chat': AIMessage(content="i'm a chatbot", additional_kwargs={}, example=False), 'llm': "i'm a textbot", 'passthrough': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=[], execution_order=4, child_execution_order=7, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ["i'm a chatbot"], '_type': 'fake-list-chat-model', 'stop': ['Thought:']}, 'options': {'stop': ['Thought:']}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'chat_models', 'fake', 'FakeListChatModel']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': "i'm a chatbot", 'generation_info': None, 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': "i'm a chatbot"}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=[], execution_order=5, child_execution_order=5, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ["i'm a textbot"], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'llms', 'fake', 'FakeListLLM']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': "i'm a textbot", 'generation_info': None}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=[], execution_order=6, child_execution_order=6, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='RunnableLambda', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type=, end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain', 'schema', 'runnable', 'RunnableLambda']}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': 
FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, outputs={'output': ChatPromptValue(messages=[SystemMessage(content='You are a nice assistant.', additional_kwargs={}), HumanMessage(content='What is your name?', additional_kwargs={}, example=False)])}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=[], execution_order=7, child_execution_order=7, child_runs=[])])]),
   ])
 # ---
diff --git a/libs/langchain/tests/unit_tests/schema/test_runnable.py b/libs/langchain/tests/unit_tests/schema/test_runnable.py
index 71a7984d126..23fda63f45e 100644
--- a/libs/langchain/tests/unit_tests/schema/test_runnable.py
+++ b/libs/langchain/tests/unit_tests/schema/test_runnable.py
@@ -566,7 +566,7 @@ def test_seq_prompt_map(
         prompt
         | passthrough
         | {
-            "chat": chat,
+            "chat": chat.bind(stop=["Thought:"]),
            "llm": llm,
             "passthrough": passthrough,
         }
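
For context, a minimal sketch of how the new bind() API is meant to be used, mirroring the updated test case above. This snippet is not part of the diff; the import paths are assumed from the serialized ids in the snapshots.

# Hypothetical usage example, not part of the PR: bind() wraps the chat model in a
# RunnableBinding, so stop=["Thought:"] is forwarded on every invoke/stream/batch call.
from langchain.chat_models.fake import FakeListChatModel
from langchain.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate

chat = FakeListChatModel(responses=["i'm a chatbot"])
prompt = ChatPromptTemplate.from_messages(
    [HumanMessagePromptTemplate.from_template("{question}")]
)

# The bound model composes like any other Runnable.
chain = prompt | chat.bind(stop=["Thought:"])
print(chain.invoke({"question": "What is your name?"}))  # AIMessage(content="i'm a chatbot")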