From da2d39a8368c7853d760819bfed32c2e003ff318 Mon Sep 17 00:00:00 2001
From: Ben Browning
Date: Sat, 12 Apr 2025 17:40:47 -0400
Subject: [PATCH] Handle chunks with null text in test_openai_completion.py

This updates test_openai_completion.py to allow chunks with null text in
streaming responses, as that's a valid chunk and I just hastily wrote the
test without accounting for this originally.

This is required to get this test passing with gpt-4o models and the
openai provider.

Signed-off-by: Ben Browning
---
 tests/integration/inference/test_openai_completion.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/integration/inference/test_openai_completion.py b/tests/integration/inference/test_openai_completion.py
index 0905d5817..75b53100c 100644
--- a/tests/integration/inference/test_openai_completion.py
+++ b/tests/integration/inference/test_openai_completion.py
@@ -115,7 +115,7 @@ def test_openai_completion_streaming(openai_client, client_with_models, text_mod
         stream=True,
         max_tokens=50,
     )
-    streamed_content = [chunk.choices[0].text for chunk in response]
+    streamed_content = [chunk.choices[0].text or "" for chunk in response]
     content_str = "".join(streamed_content).lower().strip()
     assert len(content_str) > 10
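
The sketch below (not part of the patch) illustrates why the `or ""` coalescing matters when joining streamed completion chunks. The `Chunk` and `Choice` dataclasses are hypothetical stand-ins for the OpenAI client's response objects, used only so the example runs without a live API; per the commit message, a real stream may include a chunk whose `text` is null.

```python
# Minimal sketch: a null-text chunk breaks "".join() unless coalesced to "".
# Chunk/Choice here are stand-ins for the OpenAI client's streaming objects.
from dataclasses import dataclass


@dataclass
class Choice:
    text: str | None


@dataclass
class Chunk:
    choices: list[Choice]


response = [
    Chunk([Choice("Hello")]),
    Chunk([Choice(", world")]),
    Chunk([Choice(None)]),  # null-text chunk, valid per the commit message
]

# Without the fix, "".join(streamed_content) raises TypeError on the None entry.
# With `or ""`, None is coalesced to an empty string before joining:
streamed_content = [chunk.choices[0].text or "" for chunk in response]
content_str = "".join(streamed_content).lower().strip()
assert content_str == "hello, world"
```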