From d1f5bc4906dd596d13ed92959b5e968dd51234a1 Mon Sep 17 00:00:00 2001
From: wewebber-merlin <138414820+wewebber-merlin@users.noreply.github.com>
Date: Sat, 9 Mar 2024 08:47:21 +1100
Subject: [PATCH] anthropic[patch]: add kwargs to format_output base (#18715)

_generate() and _agenerate() both accept **kwargs, which they pass on to
_format_output; but _format_output does not accept **kwargs. Attempting to
pass, e.g., timeout=50 to _generate (or to invoke()) therefore results in a
TypeError.
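A minimal reproduction sketch (the model name is illustrative and an Anthropic API key is assumed to be configured; neither is part of this patch):

```python
from langchain_anthropic import ChatAnthropic

llm = ChatAnthropic(model="claude-3-sonnet-20240229")  # illustrative model name

# Per the description above, forwarding an extra kwarg such as timeout=50
# through invoke() ended in a TypeError because _format_output() did not
# accept **kwargs; with this patch the kwarg is accepted (and ignored) there.
llm.invoke("Hello", timeout=50)
```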
---------

Co-authored-by: Erick Friis
---
 .../anthropic/langchain_anthropic/chat_models.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/libs/partners/anthropic/langchain_anthropic/chat_models.py b/libs/partners/anthropic/langchain_anthropic/chat_models.py
index 2aab9c3a062..330bcb4d6e3 100644
--- a/libs/partners/anthropic/langchain_anthropic/chat_models.py
+++ b/libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -263,7 +263,11 @@ class ChatAnthropic(BaseChatModel):
                     await run_manager.on_llm_new_token(text, chunk=chunk)
                 yield chunk
 
-    def _format_output(self, data: Any) -> ChatResult:
+    def _format_output(
+        self,
+        data: Any,
+        **kwargs: Any,
+    ) -> ChatResult:
         return ChatResult(
             generations=[
                 ChatGeneration(message=AIMessage(content=data.content[0].text))
@@ -285,7 +289,7 @@ class ChatAnthropic(BaseChatModel):
             return generate_from_stream(stream_iter)
         params = self._format_params(messages=messages, stop=stop, **kwargs)
         data = self._client.messages.create(**params)
-        return self._format_output(data)
+        return self._format_output(data, **kwargs)
 
     async def _agenerate(
         self,
@@ -301,7 +305,7 @@ class ChatAnthropic(BaseChatModel):
             return await agenerate_from_stream(stream_iter)
         params = self._format_params(messages=messages, stop=stop, **kwargs)
         data = await self._async_client.messages.create(**params)
-        return self._format_output(data)
+        return self._format_output(data, **kwargs)
 
     @deprecated(since="0.1.0", removal="0.2.0", alternative="ChatAnthropic")