(test) load test queueing

ishaan-jaff 2023-11-21 12:51:40 -08:00
parent 015c7de549
commit b98d9199f6


@@ -91,13 +91,23 @@ def make_openai_completion(question):
         )
         # polling the url
-        url = response["url"]
-        polling_url = f"http://0.0.0.0:8000{url}"
-        print(f"POLLING JOB{polling_url}")
-        response = requests.get(polling_url)
-        response = response.json()
-        status = response["status"]
-        print(f"POLLING JOB{polling_url}\nSTATUS: {status}, \n Response {response}")
+        while True:
+            try:
+                print("entering the loop to poll")
+                print("response", response)
+                url = response["url"]
+                polling_url = f"http://0.0.0.0:8000{url}"
+                print(f"POLLING JOB{polling_url}")
+                polling_response = requests.get(polling_url)
+                print("\n RESPONSE FROM POLLING JoB", polling_response)
+                polling_response = polling_response.json()
+                print("\n RESPONSE FROM POLLING JoB", polling_response)
+                status = polling_response["status"]
+                print(f"POLLING JOB{polling_url}\nSTATUS: {status}, \n Response {polling_response}")
+                time.sleep(0.5)
+            except Exception as e:
+                print("got exception in polling", e)
+                break

         # if status == "finished":
         #     print()
@@ -111,41 +121,42 @@ def make_openai_completion(question):
                 f"Question: {question[:100]}\nException: {str(e)}\n\n"
             )
         return None

+make_openai_completion("hello what's the time")

-# Number of concurrent calls (you can adjust this)
-concurrent_calls = 100
+# # Number of concurrent calls (you can adjust this)
+# concurrent_calls = 1

-# List to store the futures of concurrent calls
-futures = []
+# # List to store the futures of concurrent calls
+# futures = []

-# Make concurrent calls
-with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_calls) as executor:
-    for _ in range(concurrent_calls):
-        random_question = random.choice(questions)
-        futures.append(executor.submit(make_openai_completion, random_question))
+# # Make concurrent calls
+# with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_calls) as executor:
+#     for _ in range(concurrent_calls):
+#         random_question = random.choice(questions)
+#         futures.append(executor.submit(make_openai_completion, random_question))

-# Wait for all futures to complete
-concurrent.futures.wait(futures)
+# # Wait for all futures to complete
+# concurrent.futures.wait(futures)

-# Summarize the results
-successful_calls = 0
-failed_calls = 0
+# # Summarize the results
+# successful_calls = 0
+# failed_calls = 0

-for future in futures:
-    if future.result() is not None:
-        successful_calls += 1
-    else:
-        failed_calls += 1
+# for future in futures:
+#     if future.result() is not None:
+#         successful_calls += 1
+#     else:
+#         failed_calls += 1

-print(f"Load test Summary:")
-print(f"Total Requests: {concurrent_calls}")
-print(f"Successful Calls: {successful_calls}")
-print(f"Failed Calls: {failed_calls}")
+# print(f"Load test Summary:")
+# print(f"Total Requests: {concurrent_calls}")
+# print(f"Successful Calls: {successful_calls}")
+# print(f"Failed Calls: {failed_calls}")

-# Display content of the logs
-with open("request_log.txt", "r") as log_file:
-    print("\nRequest Log:\n", log_file.read())
+# # Display content of the logs
+# with open("request_log.txt", "r") as log_file:
+#     print("\nRequest Log:\n", log_file.read())

-with open("error_log.txt", "r") as error_log_file:
-    print("\nError Log:\n", error_log_file.read())
+# with open("error_log.txt", "r") as error_log_file:
+#     print("\nError Log:\n", error_log_file.read())