openai[patch]: fix Azure LLM test (#29302)

The tokens I get are:
```
['', '\n\n', 'The', ' sun', ' was', ' setting', ' over', ' the', ' horizon', ',', ' casting', '']
```
so it appears an extra empty-string token (note the `''` at both the start and end of the list above) is now included in the stream, raising the chunk count from 11 to 12.

lmk @efriis if we should look into this further.
This commit is contained in:
ccurme
2025-01-19 12:25:42 -05:00
committed by GitHub
parent 6b249a0dc2
commit c20f7418c7

View File

@@ -148,7 +148,7 @@ def test_openai_streaming_callback() -> None:
verbose=True,
)
llm.invoke("Write me a sentence with 100 words.")
assert callback_handler.llm_streams == 11
assert callback_handler.llm_streams == 12
@pytest.mark.scheduled
@@ -171,5 +171,5 @@ async def test_openai_async_streaming_callback() -> None:
verbose=True,
)
result = await llm.agenerate(["Write me a sentence with 100 words."])
assert callback_handler.llm_streams == 11
assert callback_handler.llm_streams == 12
assert isinstance(result, LLMResult)