mirror of https://github.com/BerriAI/litellm.git
(test) load test proxy

parent 4ccee2e1a6
commit bd5c89aab9

2 changed files with 598 additions and 5 deletions
@@ -100,7 +100,7 @@ def create_job_and_poll(request_num):
    try:
        with open("load_test_log.txt", "a") as response_file:
            response_file.write(
                f"HI"
                f"Response for request: {request_num}\n{llm_response}\n\n"
            )
    except Exception as e:
        print("GOT EXCEPTION", e)

@@ -110,7 +110,7 @@ def create_job_and_poll(request_num):
        print("got exception when polling", e)

# Number of requests
num_requests = 50
num_requests = 100

# Use ThreadPoolExecutor for parallel execution
with concurrent.futures.ThreadPoolExecutor(max_workers=num_requests) as executor:
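For context, the executor block at the end of the second hunk is where the load test presumably fans the requests out in parallel, one worker thread per request. Below is a minimal sketch of that pattern; the create_job_and_poll body here is only a placeholder, since the real function in this commit submits a job to the proxy and polls for its response.

import concurrent.futures

# Placeholder for the script's create_job_and_poll; the real function
# sends a request to the proxy and polls until a response comes back.
def create_job_and_poll(request_num):
    print(f"request {request_num} finished")

num_requests = 100

# One worker per request, so all requests are in flight at the same time.
with concurrent.futures.ThreadPoolExecutor(max_workers=num_requests) as executor:
    futures = [executor.submit(create_job_and_poll, i) for i in range(num_requests)]
    concurrent.futures.wait(futures)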