langchain-groq: Add response metadata when streaming (#30379)

- **Description:** Add missing `model_name` and `system_fingerprint`
metadata when streaming.

---------

Co-authored-by: Chester Curme <chester.curme@gmail.com>
This commit is contained in:
Simon Paredes 2025-03-23 15:34:41 -03:00 committed by GitHub
parent e2d9fe766f
commit df4448dfac
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 14 additions and 3 deletions

View File

@@ -541,6 +541,9 @@ class ChatGroq(BaseChatModel):
generation_info = {}
if finish_reason := choice.get("finish_reason"):
generation_info["finish_reason"] = finish_reason
generation_info["model_name"] = self.model_name
if system_fingerprint := chunk.get("system_fingerprint"):
generation_info["system_fingerprint"] = system_fingerprint
logprobs = choice.get("logprobs")
if logprobs:
generation_info["logprobs"] = logprobs
@@ -579,6 +582,9 @@ class ChatGroq(BaseChatModel):
generation_info = {}
if finish_reason := choice.get("finish_reason"):
generation_info["finish_reason"] = finish_reason
generation_info["model_name"] = self.model_name
if system_fingerprint := chunk.get("system_fingerprint"):
generation_info["system_fingerprint"] = system_fingerprint
logprobs = choice.get("logprobs")
if logprobs:
generation_info["logprobs"] = logprobs

View File

@@ -98,16 +98,19 @@ async def test_astream() -> None:
full: Optional[BaseMessageChunk] = None
chunks_with_token_counts = 0
chunks_with_response_metadata = 0
async for token in chat.astream("Welcome to the Groqetship!"):
assert isinstance(token, AIMessageChunk)
assert isinstance(token.content, str)
full = token if full is None else full + token
if token.usage_metadata is not None:
chunks_with_token_counts += 1
if chunks_with_token_counts != 1:
if token.response_metadata:
chunks_with_response_metadata += 1
if chunks_with_token_counts != 1 or chunks_with_response_metadata != 1:
raise AssertionError(
"Expected exactly one chunk with token counts. "
"AIMessageChunk aggregation adds counts. Check that "
"Expected exactly one chunk with token counts or metadata. "
"AIMessageChunk aggregation adds / appends these metadata. Check that "
"this is behaving properly."
)
assert isinstance(full, AIMessageChunk)
@@ -118,6 +121,8 @@ async def test_astream() -> None:
full.usage_metadata["input_tokens"] + full.usage_metadata["output_tokens"]
== full.usage_metadata["total_tokens"]
)
for expected_metadata in ["model_name", "system_fingerprint"]:
assert full.response_metadata[expected_metadata]
#