Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 18:24:20 +00:00
(test) async for chunk
Commit: b5968a3ed8
Parent: 452bba1b5d
1 changed file with 2 additions and 0 deletions
@@ -111,6 +111,8 @@ def test_get_response_streaming():
        i = 0
        async for chunk in response:
            token = chunk["choices"][0]["delta"].get("content", "")
            if token == None:
                continue  # openai v1.0.0 returns content=None
            output += token
        assert output is not None, "output cannot be None."
        assert isinstance(output, str), "output needs to be of type str"
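For context, here is a minimal standalone sketch of the pattern this test exercises: consuming a streamed completion asynchronously and concatenating the delta tokens, skipping the content=None deltas that openai v1.0.0 emits. It assumes litellm's async acompletion API with stream=True; the model name, prompt, and the helper name collect_stream are illustrative placeholders and not part of this commit, while the chunk access mirrors the test above.

# Sketch only: illustrates the streaming loop tested in the diff above.
import asyncio

import litellm


async def collect_stream(prompt: str) -> str:
    # stream=True makes acompletion yield chunks asynchronously instead of
    # returning one full response object.
    response = await litellm.acompletion(
        model="gpt-3.5-turbo",  # placeholder model for illustration
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )

    output = ""
    async for chunk in response:
        # Same access pattern as the test: openai v1.0.0 can return
        # content=None in a delta (e.g. the final chunk), so skip those.
        token = chunk["choices"][0]["delta"].get("content", "")
        if token is None:
            continue
        output += token
    return output


if __name__ == "__main__":
    print(asyncio.run(collect_stream("Say hello in one word.")))

Skipping the None deltas keeps output a plain str throughout the loop, which is exactly what the two assertions at the end of the test verify.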