Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 10:14:26 +00:00
(test) load test proxy completion

This commit is contained in:
parent 9747cc5aad
commit 5e2c13fb11

1 changed file with 27 additions and 0 deletions
27  litellm/proxy/tests/load_test_completion.py  Normal file
@@ -0,0 +1,27 @@
import time, asyncio
from openai import AsyncOpenAI
import uuid


litellm_client = AsyncOpenAI(
    api_key="test",
    base_url="http://0.0.0.0:8000"
)

async def litellm_completion():
    return await litellm_client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": f"This is a test: {uuid.uuid4()}"}],
    )


async def main():
    start = time.time()
    n = 1  # Number of concurrent tasks
    tasks = [litellm_completion() for _ in range(n)]
    chat_completions = await asyncio.gather(*tasks)
    successful_completions = [c for c in chat_completions if c is not None]
    print(n, time.time() - start, len(successful_completions))

if __name__ == "__main__":
    asyncio.run(main())
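
A possible variant, sketched here only for illustration and not part of the commit: asyncio.gather raises on the first failed request by default, so the `c is not None` filter above never actually sees a failure. Catching exceptions inside each task lets `successful_completions` reflect real successes when the proxy is under load. The concurrency value of 100 is an assumption for a heavier run; all other names mirror the script above.

# Sketch only: per-task error handling so one failed request does not
# abort the whole asyncio.gather call. Illustrative, not the committed script.
import time, asyncio
import uuid

from openai import AsyncOpenAI

litellm_client = AsyncOpenAI(api_key="test", base_url="http://0.0.0.0:8000")


async def litellm_completion():
    try:
        return await litellm_client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": f"This is a test: {uuid.uuid4()}"}],
        )
    except Exception as e:
        # A request that errors out is counted as a failure instead of crashing the run.
        print(e)
        return None


async def main():
    start = time.time()
    n = 100  # assumed number of concurrent tasks for a heavier load test
    chat_completions = await asyncio.gather(*(litellm_completion() for _ in range(n)))
    successful_completions = [c for c in chat_completions if c is not None]
    print(n, time.time() - start, len(successful_completions))


if __name__ == "__main__":
    asyncio.run(main())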