Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 03:34:10 +00:00

fix(utils.py): await async function in client wrapper
commit e07bf0a8de
parent efe81032f4

2 changed files with 3 additions and 4 deletions
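
The commit title describes a common asyncio pitfall: when a client wrapper decorates a function that may be async, the wrapper has to await the wrapped call, otherwise the caller gets back a bare coroutine object instead of the completion result. The sketch below is a minimal, hypothetical illustration of that pattern, assuming an illustrative client decorator and acompletion_stub function; it is not litellm's actual utils.py code.

    import functools
    import inspect

    def client(original_function):
        # Hypothetical wrapper, for illustration only (not litellm's utils.py).
        @functools.wraps(original_function)
        async def async_wrapper(*args, **kwargs):
            # The bug class named in the commit title: the async path must await
            # the wrapped call so callers receive the result, not a coroutine.
            return await original_function(*args, **kwargs)

        @functools.wraps(original_function)
        def sync_wrapper(*args, **kwargs):
            return original_function(*args, **kwargs)

        # Route async callables through the awaiting wrapper.
        if inspect.iscoroutinefunction(original_function):
            return async_wrapper
        return sync_wrapper

    @client
    async def acompletion_stub(**kwargs):
        return "ok"

    # asyncio.run(acompletion_stub()) now returns "ok" rather than an un-awaited coroutine.

The test-side hunks visible in this commit follow.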
@@ -42,6 +42,7 @@ def test_get_response_streaming():
        user_message = "write a short poem in one sentence"
        messages = [{"content": user_message, "role": "user"}]
        try:
            litellm.set_verbose = True
            response = await acompletion(model="command-nightly", messages=messages, stream=True)
            print(type(response))

@@ -65,7 +66,7 @@ def test_get_response_streaming():
    asyncio.run(test_async_call())

# test_get_response_streaming()
test_get_response_streaming()

def test_get_response_non_openai_streaming():
    import asyncio
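
In the test above, the awaited acompletion(..., stream=True) result is only type-printed. Below is a short usage sketch of actually consuming the stream, assuming the awaited result is async-iterable as in litellm's documented streaming usage; the helper name stream_short_poem is illustrative.

    import asyncio
    import litellm
    from litellm import acompletion

    async def stream_short_poem():
        litellm.set_verbose = True
        messages = [{"content": "write a short poem in one sentence", "role": "user"}]
        response = await acompletion(model="command-nightly", messages=messages, stream=True)
        # With stream=True the awaited result yields chunks as they arrive.
        async for chunk in response:
            print(chunk)

    # asyncio.run(stream_short_poem())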