Mirror of https://github.com/BerriAI/litellm.git
fix(utils.py): add metadata to logging obj on setup, if exists
parent 1ec5083542
commit f05aba1f85

2 changed files with 11 additions and 11 deletions
@@ -186,4 +186,4 @@ class MaxParallelRequestsHandler(CustomLogger):
                 request_count_api_key, new_val, ttl=60
             )  # save in cache for up to 1 min.
         except Exception as e:
-            self.print_verbose(f"An exception occurred - {str(e)}")  # noqa
+            print(f"An exception occurred - {str(e)}")  # noqa
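In the hunk above, the exception handler now prints unconditionally instead of routing through the handler's gated helper. For readers unfamiliar with that pattern, here is a minimal, self-contained sketch of a verbosity-gated print helper; the module-level set_verbose flag and the helper body are assumptions for illustration, not code from this commit:

# Illustrative sketch only: a verbosity-gated debug print, approximating the
# self.print_verbose(...) call replaced in the hunk above.
set_verbose = False  # assumed global toggle; not taken from this commit


def print_verbose(message: str) -> None:
    # Emit debug output only when verbose mode is on.
    if set_verbose:
        print(message)  # noqa


try:
    raise RuntimeError("simulated cache failure")
except Exception as e:
    # After this commit, the handler prints unconditionally, so the error is
    # visible even when verbose mode is off.
    print(f"An exception occurred - {str(e)}")  # noqa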
@@ -773,7 +773,7 @@ class Logging:
         self.model = model
         self.user = user
         self.litellm_params = litellm_params
-        self.logger_fn = litellm_params["logger_fn"]
+        self.logger_fn = litellm_params.get("logger_fn", None)
         print_verbose(f"self.optional_params: {self.optional_params}")
         self.model_call_details = {
             "model": self.model,
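Switching from subscripting to .get() keeps Logging.__init__ from raising when litellm_params lacks a "logger_fn" key, which can now happen because the wrapper below may build the dict with only "metadata" in it. A standalone illustration (the dict contents are invented for the example):

# Illustration only: a litellm_params dict carrying just "metadata", as the
# client wrapper further down can now produce.
litellm_params = {"metadata": {"user_id": "abc-123"}}

# Old behaviour: direct subscripting raises KeyError when the key is missing.
try:
    logger_fn = litellm_params["logger_fn"]
except KeyError:
    logger_fn = None

# New behaviour: .get() falls back to None without raising.
logger_fn = litellm_params.get("logger_fn", None)
print(logger_fn)  # -> None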
@@ -1941,6 +1941,15 @@ def client(original_function):
                 call_type=call_type,
                 start_time=start_time,
             )
+            ## check if metadata is passed in
+            if "metadata" in kwargs:
+                litellm_params = {"metadata": kwargs["metadata"]}
+                logging_obj.update_environment_variables(
+                    model=model,
+                    user="",
+                    optional_params={},
+                    litellm_params=litellm_params,
+                )
             return logging_obj
         except Exception as e:
             import logging
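The added block forwards caller-supplied metadata into the logging object before it is returned, using exactly the keyword arguments shown in the hunk. Here is a hedged, self-contained sketch of that hand-off; the toy Logging class, the function_setup helper, and the sample values are stand-ins for illustration, and only the update_environment_variables(...) call shape mirrors the diff:

# Stand-in sketch of the metadata hand-off performed in the wrapper above.
# The Logging class here is a toy; only the update_environment_variables
# keyword arguments mirror the diff.

class Logging:
    def __init__(self):
        self.model_call_details = {}

    def update_environment_variables(self, model, user, optional_params, litellm_params):
        # Record the metadata so downstream loggers/callbacks can read it.
        self.model_call_details["model"] = model
        self.model_call_details["litellm_params"] = litellm_params


def function_setup(model, **kwargs):
    logging_obj = Logging()
    # Mirrors the added block: only forward metadata if the caller passed it.
    if "metadata" in kwargs:
        litellm_params = {"metadata": kwargs["metadata"]}
        logging_obj.update_environment_variables(
            model=model,
            user="",
            optional_params={},
            litellm_params=litellm_params,
        )
    return logging_obj


obj = function_setup("gpt-3.5-turbo", metadata={"trace_id": "abc"})
print(obj.model_call_details["litellm_params"])  # {'metadata': {'trace_id': 'abc'}}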
@@ -5731,15 +5740,6 @@ def exception_type(
                             model=model,
                             llm_provider="openai",
                         )
-                    else:
-                        exception_mapping_worked = True
-                        raise APIError(
-                            status_code=original_exception.status_code,
-                            message=f"OpenAIException - {original_exception.message}",
-                            llm_provider="openai",
-                            model=model,
-                            request=original_exception.request,
-                        )
                 else:
                     # if no status code then it is an APIConnectionError: https://github.com/openai/openai-python#handling-errors
                     raise APIConnectionError(
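The lines removed above were an inner else fallback that raised APIError for unmapped status codes; what remains is the outer branch that, per the kept comment, maps errors without a status code to APIConnectionError. A simplified sketch of that mapping pattern with toy exception classes (the class definitions and the helper name map_openai_exception are invented for illustration, not litellm's API):

# Toy stand-ins for the exception classes referenced in the hunk above.
class APIError(Exception):
    def __init__(self, status_code, message, llm_provider, model, request=None):
        super().__init__(message)
        self.status_code = status_code
        self.llm_provider = llm_provider
        self.model = model


class APIConnectionError(Exception):
    def __init__(self, message, llm_provider, model):
        super().__init__(message)
        self.llm_provider = llm_provider
        self.model = model


def map_openai_exception(original_exception, model):
    # If the provider error carries an HTTP status code, surface it as an
    # APIError; otherwise treat it as a connection-level failure.
    if hasattr(original_exception, "status_code"):
        raise APIError(
            status_code=original_exception.status_code,
            message=f"OpenAIException - {original_exception}",
            llm_provider="openai",
            model=model,
        )
    else:
        # No status code -> APIConnectionError, per the comment kept in the hunk.
        raise APIConnectionError(
            message=str(original_exception),
            llm_provider="openai",
            model=model,
        )


class FakeProviderError(Exception):
    status_code = 429  # invented value for the demo


try:
    map_openai_exception(FakeProviderError("rate limited"), model="gpt-4")
except APIError as err:
    print(err.status_code)  # -> 429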