mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-08-03 01:03:59 +00:00
Handle chunks with null text in test_openai_completion.py
This updates test_openai_completion.py to allow chunks with null text in streaming responses, since a null-text chunk is valid; the original test was written hastily and did not account for this. The change is required for this test to pass with gpt-4o models on the openai provider. Signed-off-by: Ben Browning <bbrownin@redhat.com>
This commit is contained in:
parent
c014571258
commit
da2d39a836
1 changed files with 1 additions and 1 deletions
|
@@ -115,7 +115,7 @@ def test_openai_completion_streaming(openai_client, client_with_models, text_model_id):
         stream=True,
         max_tokens=50,
     )
-    streamed_content = [chunk.choices[0].text for chunk in response]
+    streamed_content = [chunk.choices[0].text or "" for chunk in response]
     content_str = "".join(streamed_content).lower().strip()
     assert len(content_str) > 10
|
Loading…
Add table
Add a link
Reference in a new issue