community[patch]: add missing chunk parameter for _stream/_astream (#17807)

- Description: Add the missing chunk parameter to _stream/_astream for
  several chat models so that all chat models stream in a consistent way;
  a minimal sketch of the resulting pattern follows the diff below.
- Issue: N/A
- Dependencies: N/A
commit 31891092d8 (parent 1b0802babe)
Author: mackong
Date: 2024-02-22 07:32:28 +08:00
Committed by: GitHub
15 changed files with 71 additions and 42 deletions


@@ -147,9 +147,10 @@ class ChatCohere(BaseChatModel, BaseCohere):
         for data in stream:
             if data.event_type == "text-generation":
                 delta = data.text
-                yield ChatGenerationChunk(message=AIMessageChunk(content=delta))
+                chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
+                yield chunk
                 if run_manager:
-                    run_manager.on_llm_new_token(delta)
+                    run_manager.on_llm_new_token(delta, chunk=chunk)
 
     async def _astream(
         self,
@@ -164,9 +165,10 @@ class ChatCohere(BaseChatModel, BaseCohere):
         async for data in stream:
             if data.event_type == "text-generation":
                 delta = data.text
-                yield ChatGenerationChunk(message=AIMessageChunk(content=delta))
+                chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
+                yield chunk
                 if run_manager:
-                    await run_manager.on_llm_new_token(delta)
+                    await run_manager.on_llm_new_token(delta, chunk=chunk)
 
     def _get_generation_info(self, response: Any) -> Dict[str, Any]:
         """Get the generation info from cohere API response."""