community[patch]: add missing chunk parameter for _stream/_astream (#17807)

- Description: Add the missing chunk parameter to _stream/_astream for some
chat models, making all chat models behave consistently.
- Issue: N/A
- Dependencies: N/A
This commit is contained in:
mackong
2024-02-22 07:32:28 +08:00
committed by GitHub
parent 1b0802babe
commit 31891092d8
15 changed files with 71 additions and 42 deletions

View File

@@ -123,9 +123,10 @@ class ChatLiteLLMRouter(ChatLiteLLM):
delta = chunk["choices"][0]["delta"]
chunk = _convert_delta_to_message_chunk(delta, default_chunk_class)
default_chunk_class = chunk.__class__
yield ChatGenerationChunk(message=chunk)
cg_chunk = ChatGenerationChunk(message=chunk)
yield cg_chunk
if run_manager:
run_manager.on_llm_new_token(chunk.content, **params)
run_manager.on_llm_new_token(chunk.content, chunk=cg_chunk, **params)
async def _astream(
self,
@@ -148,9 +149,12 @@ class ChatLiteLLMRouter(ChatLiteLLM):
delta = chunk["choices"][0]["delta"]
chunk = _convert_delta_to_message_chunk(delta, default_chunk_class)
default_chunk_class = chunk.__class__
yield ChatGenerationChunk(message=chunk)
cg_chunk = ChatGenerationChunk(message=chunk)
yield cg_chunk
if run_manager:
await run_manager.on_llm_new_token(chunk.content, **params)
await run_manager.on_llm_new_token(
chunk.content, chunk=cg_chunk, **params
)
async def _agenerate(
self,