Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
(test) async for chunk

parent 452bba1b5d
commit b5968a3ed8
1 changed file with 2 additions and 0 deletions
@@ -111,6 +111,8 @@ def test_get_response_streaming():
     i = 0
     async for chunk in response:
         token = chunk["choices"][0]["delta"].get("content", "")
+        if token == None:
+            continue  # openai v1.0.0 returns content=None
         output += token
     assert output is not None, "output cannot be None."
     assert isinstance(output, str), "output needs to be of type str"
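For context, below is a minimal, self-contained sketch of how the added None-guard fits into an async streaming consumer. It is not the repository's actual test code: it assumes litellm is installed with an OpenAI API key configured, and the model name, prompt, and function name are placeholders chosen for illustration.

import asyncio
import litellm


async def collect_stream() -> str:
    # Request a streamed completion; acompletion(..., stream=True) yields
    # chunks asynchronously. The model name below is a placeholder.
    response = await litellm.acompletion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "say hello"}],
        stream=True,
    )
    output = ""
    async for chunk in response:
        token = chunk["choices"][0]["delta"].get("content", "")
        # Same guard as in the diff above: openai v1.0.0 can send delta
        # chunks whose content is None, so skip them rather than
        # concatenating None into the output string.
        if token is None:
            continue
        output += token
    return output


if __name__ == "__main__":
    print(asyncio.run(collect_stream()))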