community[patch]: fix structured_output in llamacpp integration (#27202)

Resolves https://github.com/langchain-ai/langchain/issues/25318.
This commit is contained in:
ccurme
2024-10-08 15:16:59 -04:00
committed by GitHub
parent c3cb56a9e8
commit e3920f2320
2 changed files with 22 additions and 2 deletions

View File

@@ -0,0 +1,19 @@
from pydantic import BaseModel, Field
from langchain_community.chat_models import ChatLlamaCpp
# Pydantic schema used as the structured-output target for ChatLlamaCpp.
# NOTE: the class docstring and each Field description are runtime-visible —
# pydantic folds them into the generated JSON schema that is sent to the
# model — so they are part of behavior, not mere documentation.
class Joke(BaseModel):
    """Joke to tell user."""

    # Lead-in line of the joke (exposed in the schema via its description).
    setup: str = Field(description="question to set up a joke")
    # Payoff line of the joke (exposed in the schema via its description).
    punchline: str = Field(description="answer to resolve the joke")
# TODO: replace with standard integration tests
# See example in tests/integration_tests/chat_models/test_litellm.py
def test_structured_output() -> None:
    """Smoke-test that `with_structured_output` yields a `Joke` instance.

    Requires a local GGUF model file at the hard-coded path below, so this
    runs only as a manual/integration test.
    """
    model = ChatLlamaCpp(model_path="/path/to/Meta-Llama-3.1-8B-Instruct.Q4_K_M.gguf")
    structured_chain = model.with_structured_output(Joke)
    joke = structured_chain.invoke("Tell me a short joke about cats.")
    # The structured-output wrapper must parse the raw completion into the
    # pydantic schema, not return a plain string/dict.
    assert isinstance(joke, Joke)