Mirror of https://github.com/BerriAI/litellm.git
(test) add proxy cli tests
commit a44aa8c10c
parent 9bfbdc18fb
1 changed file with 29 additions and 0 deletions
litellm/proxy/tests/test_async.py (new file, 29 additions)
@@ -0,0 +1,29 @@
+# This tests the litellm proxy
+# it makes async Completion requests with streaming
+import openai
+
+openai.api_base = "http://0.0.0.0:8000"
+openai.api_key = "temp-key"
+print(openai.api_base)
+
+async def test_async_completion():
+    response = await openai.Completion.acreate(
+        model="gpt-3.5-turbo",
+        prompt='this is a test request, write a short poem',
+    )
+    print(response)
+
+    print("test_streaming")
+    response = await openai.Completion.acreate(
+        model="gpt-3.5-turbo",
+        prompt='this is a test request, write a short poem',
+        stream=True
+    )
+    print(response)
+    async for chunk in response:
+        print(chunk)
+
+
+import asyncio
+asyncio.run(test_async_completion())
+
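Note: the committed test is written against the pre-1.0 openai Python SDK (module-level api_base/api_key and openai.Completion.acreate). The following is a minimal sketch, not part of this commit, of how the same check could look with the openai>=1.0 client; it assumes the litellm proxy is still listening on http://0.0.0.0:8000 and accepts the placeholder key "temp-key", both carried over from the test above.

# Hypothetical port of the test to the openai>=1.0 async client (not part of this commit)
import asyncio

from openai import AsyncOpenAI

# Assumed proxy address and placeholder key, taken from the committed test above
client = AsyncOpenAI(base_url="http://0.0.0.0:8000", api_key="temp-key")


async def test_async_completion():
    # plain async completion routed through the proxy
    response = await client.completions.create(
        model="gpt-3.5-turbo",
        prompt="this is a test request, write a short poem",
    )
    print(response)

    # streaming variant: the call returns an async iterator of chunks
    stream = await client.completions.create(
        model="gpt-3.5-turbo",
        prompt="this is a test request, write a short poem",
        stream=True,
    )
    async for chunk in stream:
        print(chunk)


asyncio.run(test_async_completion())

Either version expects a litellm proxy already running locally, typically started via the CLI (for example litellm --model gpt-3.5-turbo); the exact flags and default port may differ between litellm versions.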