add model load testing functionality

Krrish Dholakia 2023-08-11 17:59:51 -07:00
parent a2efd32f5c
commit 211e1edfcb
8 changed files with 35 additions and 8 deletions


@@ -302,6 +302,25 @@ def get_optional_params(
        return optional_params
    return optional_params

def load_test_model(model: str, custom_llm_provider: str = None, prompt: str = None, num_calls: int = None):
    # Defaults: a short test prompt and 100 calls, overridable by the caller
    test_prompt = "Hey, how's it going"
    test_calls = 100
    if prompt:
        test_prompt = prompt
    if num_calls:
        test_calls = num_calls
    # Build one identical message list per call and time the whole batch
    messages = [[{"role": "user", "content": test_prompt}] for _ in range(test_calls)]
    start_time = time.time()
    try:
        litellm.batch_completion(model=model, messages=messages, custom_llm_provider=custom_llm_provider)
        end_time = time.time()
        response_time = end_time - start_time
        return {"total_response_time": response_time, "calls_made": test_calls, "status": "success", "exception": None}
    except Exception as e:
        end_time = time.time()
        response_time = end_time - start_time
        return {"total_response_time": response_time, "calls_made": test_calls, "status": "failed", "exception": e}

def set_callbacks(callback_list):
    global sentry_sdk_instance, capture_exception, add_breadcrumb, posthog, slack_app, alerts_channel, heliconeLogger, aispendLogger, berrispendLogger, supabaseClient
    try:
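
Usage note: a minimal sketch of how the new function might be called, assuming load_test_model is exposed at the litellm package level (the export is not shown in this hunk) and that credentials for the chosen model are already configured in the environment. The model name and call count below are illustrative only.

import litellm

# Fire a small batch of identical prompts at one model and time the run.
result = litellm.load_test_model(
    model="gpt-3.5-turbo",        # hypothetical model choice for illustration
    prompt="Hey, how's it going",
    num_calls=10,                 # keep the batch small while experimenting
)
print(result["status"], result["calls_made"], result["total_response_time"])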