fix(router.py): fix order of dereferenced dictionaries

Krrish Dholakia 2024-01-22 21:42:25 -08:00
parent 7079b951de
commit 5e0d99b2ef
3 changed files with 18 additions and 14 deletions
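Background on the fix: when several dicts are unpacked into one literal, Python resolves duplicate keys left to right, so whichever dict is unpacked last wins. Moving **kwargs to the front therefore lets the deployment's own parameters in data take precedence over caller-supplied overrides. A minimal standalone sketch of that behavior (illustrative values, not litellm code):

deployment_params = {"model": "azure/chatgpt-v-2", "timeout": 600}   # hypothetical deployment config
caller_kwargs = {"model": "gpt-3.5-turbo", "num_retries": 2}         # hypothetical caller overrides

# Old order: kwargs unpacked last, so the caller's "model" clobbers the deployment's.
old_merge = {**deployment_params, **caller_kwargs}
assert old_merge["model"] == "gpt-3.5-turbo"

# New order: kwargs unpacked first, so the deployment's "model" wins.
new_merge = {**caller_kwargs, **deployment_params}
assert new_merge["model"] == "azure/chatgpt-v-2"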


@@ -142,7 +142,11 @@ def completion(
         logging_obj.pre_call(
             input=prompt,
             api_key=api_key,
-            additional_args={"complete_input_dict": data, "api_base": api_base},
+            additional_args={
+                "complete_input_dict": data,
+                "api_base": api_base,
+                "headers": headers,
+            },
         )
         ## COMPLETION CALL
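The extra "headers" entry gives the pre-call logger enough information to reproduce the outgoing request. A rough sketch of what a logger can do with those fields (hypothetical helper and illustrative values, not the actual Logging.pre_call code):

import json

def sketch_curl(api_base: str, headers: dict, data: dict) -> str:
    # Hypothetical helper: rebuild a copy-pasteable curl command from the
    # pieces now passed in additional_args (api_base, headers, request body).
    parts = [f"curl -X POST {api_base}"]
    for name, value in headers.items():
        parts.append(f"-H '{name}: {value}'")
    parts.append(f"-d '{json.dumps(data)}'")
    return " \\\n  ".join(parts)

print(sketch_curl("https://api-inference.huggingface.co/models/gpt2",
                  {"Authorization": "Bearer hf_xxx"},
                  {"inputs": "Hello"}))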


@@ -394,11 +394,11 @@ class Router:
             response = litellm.completion(
                 **{
+                    **kwargs,
                     **data,
                     "messages": messages,
                     "caching": self.cache_responses,
                     "client": model_client,
-                    **kwargs,
                 }
             )
             verbose_router_logger.info(
@@ -479,7 +479,8 @@
             kwargs.setdefault("metadata", {}).update({"model_group": model})
             response = await self.async_function_with_fallbacks(
-                **kwargs, **completion_kwargs
+                **completion_kwargs,
+                **kwargs,
             )
             return response
@@ -525,16 +526,15 @@
             else:
                 model_client = potential_model_client
             self.total_calls[model_name] += 1
-            response = await litellm.acompletion(
-                **{
-                    **data,
-                    "messages": messages,
-                    "caching": self.cache_responses,
-                    "client": model_client,
-                    "timeout": self.timeout,
-                    **kwargs,
-                }
-            )
+            final_data = {
+                **kwargs,
+                **data,
+                "messages": messages,
+                "caching": self.cache_responses,
+                "client": model_client,
+                "timeout": self.timeout,
+            }
+            response = await litellm.acompletion(**final_data)
             self.success_calls[model_name] += 1
             verbose_router_logger.info(
                 f"litellm.acompletion(model={model_name})\033[32m 200 OK\033[0m"

@@ -844,7 +844,7 @@ class Logging:
                 curl_command += additional_args.get("request_str", None)
             elif api_base == "":
                 curl_command = self.model_call_details
-            print_verbose(f"\033[92m{curl_command}\033[0m\n")
+            verbose_logger.info(f"\033[92m{curl_command}\033[0m\n")
             if self.logger_fn and callable(self.logger_fn):
                 try:
                     self.logger_fn(
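Routing the curl preview through verbose_logger instead of print_verbose means it obeys standard logging configuration (handlers, levels, formatting) rather than going straight to stdout. A generic sketch of the pattern; the logger name and setup below are illustrative, not litellm's exact wiring:

import logging

verbose_logger = logging.getLogger("litellm")   # illustrative name
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
verbose_logger.addHandler(handler)
verbose_logger.setLevel(logging.INFO)

# Callers that want the old print-style output enable INFO; everyone else
# can leave the logger at WARNING and the curl preview stays silent.
verbose_logger.info("\033[92mcurl -X POST https://example.com/v1/chat ...\033[0m")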