Add coverage for the custom API base scenario in load testing

This commit is contained in:
Krrish Dholakia 2023-08-11 18:20:21 -07:00
parent 303ee905e6
commit 80d77fed71
8 changed files with 8 additions and 7 deletions

View file

@@ -302,7 +302,7 @@ def get_optional_params(
return optional_params
return optional_params
def load_test_model(model: str, custom_llm_provider: str = None, prompt: str = None, num_calls: int = None):
def load_test_model(model: str, custom_llm_provider: str = None, custom_api_base: str = None, prompt: str = None, num_calls: int = None, force_timeout: int = None):
test_prompt = "Hey, how's it going"
test_calls = 100
if prompt:
@@ -312,7 +312,7 @@ def load_test_model(model: str, custom_llm_provider: str = None, prompt: str = N
messages = [[{"role": "user", "content": test_prompt}] for _ in range(test_calls)]
start_time = time.time()
try:
litellm.batch_completion(model=model, messages=messages, custom_llm_provider=custom_llm_provider)
litellm.batch_completion(model=model, messages=messages, custom_llm_provider=custom_llm_provider, custom_api_base = custom_api_base, force_timeout=force_timeout)
end_time = time.time()
response_time = end_time - start_time
return {"total_response_time": response_time, "calls_made": 100, "status": "success", "exception": None}