v0 return cache key in responses

This commit is contained in:
Ishaan Jaff 2024-04-04 10:11:18 -07:00
parent 4b56f08cbe
commit 9dc4127576
2 changed files with 7 additions and 4 deletions

View file

@@ -3432,10 +3432,10 @@ async def chat_completion(
# Post Call Processing
data["litellm_status"] = "success" # used for alerting
if hasattr(response, "_hidden_params"):
model_id = response._hidden_params.get("model_id", None) or ""
else:
model_id = ""
hidden_params = getattr(response, "_hidden_params", {}) or {}
model_id = hidden_params.get("model_id", None) or ""
cache_key = hidden_params.get("cache_key", None) or ""
if (
"stream" in data and data["stream"] == True
@@ -3451,6 +3451,7 @@ async def chat_completion(
)
fastapi_response.headers["x-litellm-model-id"] = model_id
fastapi_response.headers["x-litellm-cache-key"] = cache_key
### CALL HOOKS ### - modify outgoing data
response = await proxy_logging_obj.post_call_success_hook(