Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
feat(aporio_ai.py): support aporio ai prompt injection for chat completion requests
Closes https://github.com/BerriAI/litellm/issues/2950
commit 07d90f6739 (parent e587d32058)
5 changed files with 217 additions and 6 deletions
@@ -453,8 +453,10 @@ class _PROXY_MaxParallelRequestsHandler(CustomLogger):
     async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time):
         try:
             self.print_verbose(f"Inside Max Parallel Request Failure Hook")
-            global_max_parallel_requests = kwargs["litellm_params"]["metadata"].get(
-                "global_max_parallel_requests", None
+            global_max_parallel_requests = (
+                kwargs["litellm_params"]
+                .get("metadata", {})
+                .get("global_max_parallel_requests", None)
             )
             user_api_key = (
                 kwargs["litellm_params"].get("metadata", {}).get("user_api_key", None)
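The hunk above swaps direct indexing for chained .get() calls, so a request whose litellm_params lacks a "metadata" dict no longer raises KeyError inside the failure hook. A minimal standalone sketch of the difference (the kwargs payload here is invented for illustration, not an actual proxy request):

# Illustration of the lookup change; this kwargs payload is a made-up example.
kwargs = {"litellm_params": {}}  # no "metadata" key present

# Old pattern: direct indexing raises KeyError when "metadata" is absent.
try:
    value = kwargs["litellm_params"]["metadata"].get(
        "global_max_parallel_requests", None
    )
except KeyError as err:
    print(f"old pattern failed: {err!r}")  # old pattern failed: KeyError('metadata')

# New pattern: chained .get() falls back to an empty dict, then to None.
value = (
    kwargs["litellm_params"]
    .get("metadata", {})
    .get("global_max_parallel_requests", None)
)
print(value)  # None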
@@ -516,5 +518,7 @@ class _PROXY_MaxParallelRequestsHandler(CustomLogger):
             ) # save in cache for up to 1 min.
         except Exception as e:
             verbose_proxy_logger.info(
-                f"Inside Parallel Request Limiter: An exception occurred - {str(e)}."
+                "Inside Parallel Request Limiter: An exception occurred - {}\n{}".format(
+                    str(e), traceback.format_exc()
+                )
             )
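This second hunk upgrades the exception log from an f-string carrying only str(e) to str.format plus traceback.format_exc(), so the handler records the full stack trace instead of just the message. A self-contained sketch of the same pattern (the logger set up here is a stand-in for the proxy's verbose_proxy_logger, configured only so the snippet runs on its own):

import logging
import traceback

# Stand-in for litellm's verbose_proxy_logger, for demonstration only.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("proxy_sketch")

try:
    {}["missing"]  # force a KeyError for demonstration
except Exception as e:
    # Same pattern as the diff: message plus full traceback in one log record.
    logger.info(
        "Inside Parallel Request Limiter: An exception occurred - {}\n{}".format(
            str(e), traceback.format_exc()
        )
    )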