From e3920f23205384536180d7004179434c41a19360 Mon Sep 17 00:00:00 2001
From: ccurme
Date: Tue, 8 Oct 2024 15:16:59 -0400
Subject: [PATCH] community[patch]: fix structured_output in llamacpp integration (#27202)

Resolves https://github.com/langchain-ai/langchain/issues/25318.
---
 .../chat_models/llamacpp.py      |  5 +++--
 .../chat_models/test_llamacpp.py | 19 +++++++++++++++++++
 2 files changed, 22 insertions(+), 2 deletions(-)
 create mode 100644 libs/community/tests/integration_tests/chat_models/test_llamacpp.py

diff --git a/libs/community/langchain_community/chat_models/llamacpp.py b/libs/community/langchain_community/chat_models/llamacpp.py
index 65055e7fa7b..ea9b5975d11 100644
--- a/libs/community/langchain_community/chat_models/llamacpp.py
+++ b/libs/community/langchain_community/chat_models/llamacpp.py
@@ -342,7 +342,7 @@ class ChatLlamaCpp(BaseChatModel):
         self,
         tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
         *,
-        tool_choice: Optional[Union[Dict[str, Dict], bool, str]] = None,
+        tool_choice: Optional[Union[dict, bool, str]] = None,
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model
@@ -538,7 +538,8 @@ class ChatLlamaCpp(BaseChatModel):
                 "Received None."
             )
         tool_name = convert_to_openai_tool(schema)["function"]["name"]
-        llm = self.bind_tools([schema], tool_choice=tool_name)
+        tool_choice = {"type": "function", "function": {"name": tool_name}}
+        llm = self.bind_tools([schema], tool_choice=tool_choice)
         if is_pydantic_schema:
             output_parser: OutputParserLike = PydanticToolsParser(
                 tools=[cast(Type, schema)], first_tool_only=True
diff --git a/libs/community/tests/integration_tests/chat_models/test_llamacpp.py b/libs/community/tests/integration_tests/chat_models/test_llamacpp.py
new file mode 100644
index 00000000000..589f6e3063d
--- /dev/null
+++ b/libs/community/tests/integration_tests/chat_models/test_llamacpp.py
@@ -0,0 +1,19 @@
+from pydantic import BaseModel, Field
+
+from langchain_community.chat_models import ChatLlamaCpp
+
+
+class Joke(BaseModel):
+    """Joke to tell user."""
+
+    setup: str = Field(description="question to set up a joke")
+    punchline: str = Field(description="answer to resolve the joke")
+
+
+# TODO: replace with standard integration tests
+# See example in tests/integration_tests/chat_models/test_litellm.py
+def test_structured_output() -> None:
+    llm = ChatLlamaCpp(model_path="/path/to/Meta-Llama-3.1-8B-Instruct.Q4_K_M.gguf")
+    structured_llm = llm.with_structured_output(Joke)
+    result = structured_llm.invoke("Tell me a short joke about cats.")
+    assert isinstance(result, Joke)
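
Reviewer note (not part of the commit): a minimal sketch of the code path this
patch fixes. Per the linked issue, with_structured_output previously passed the
bare tool name string as tool_choice, which llama-cpp-python does not accept;
the fix has bind_tools receive the OpenAI-style dict instead. The model path
below is a placeholder, and the direct bind_tools call mirrors what
with_structured_output now does internally; this is illustrative, not a
definitive reproduction.

    from pydantic import BaseModel, Field

    from langchain_community.chat_models import ChatLlamaCpp


    class Joke(BaseModel):
        """Joke to tell user."""

        setup: str = Field(description="question to set up a joke")
        punchline: str = Field(description="answer to resolve the joke")


    # Placeholder path; any chat-tuned GGUF model should work.
    llm = ChatLlamaCpp(model_path="/path/to/model.gguf")

    # Equivalent to the patched internals: the schema is converted to an
    # OpenAI-style tool ("Joke" is its derived name), then bound with a
    # dict-shaped tool_choice rather than the bare name string.
    tool_choice = {"type": "function", "function": {"name": "Joke"}}
    bound_llm = llm.bind_tools([Joke], tool_choice=tool_choice)

    # The public API exercised by the new integration test:
    structured_llm = llm.with_structured_output(Joke)
    result = structured_llm.invoke("Tell me a short joke about cats.")
    assert isinstance(result, Joke)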