diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 1b211976f..16aa4c6b4 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -381,9 +381,11 @@ async def track_cost_callback(
         elif kwargs["stream"] is False: # regular response
             input_text = kwargs.get("messages", "")
             if isinstance(input_text, list):
-                input_text = "".join(m["content"] for m in input_text)
+                response_cost = litellm.completion_cost(completion_response=completion_response, messages=input_text)
+            elif isinstance(input_text, str):
+                response_cost = litellm.completion_cost(completion_response=completion_response, prompt=input_text)
             print(f"received completion response: {completion_response}")
-            response_cost = litellm.completion_cost(completion_response=completion_response, completion=input_text)
+            print("regular response_cost", response_cost)
             user_api_key = kwargs["litellm_params"]["metadata"].get("user_api_key", None)
             print(f"user_api_key - {user_api_key}; prisma_client - {prisma_client}")
 
diff --git a/requirements.txt b/requirements.txt
index f153c58f3..b6a15fb7a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,4 +16,5 @@ prisma
 celery
 psutil
 mangum
-google-generativeai
\ No newline at end of file
+google-generativeai
+async_generator # for ollama
\ No newline at end of file
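
For reference, below is a minimal sketch (not part of the patch) of the non-streaming cost-calculation branch the first hunk introduces: litellm.completion_cost is called with messages= when the input is a list of chat-message dicts and with prompt= when it is a plain string, as in the patch above. The helper name _regular_response_cost and its signature are illustrative only; in the patch this logic lives inline in track_cost_callback.

# Sketch only, assuming litellm is installed; helper name is hypothetical.
import litellm

def _regular_response_cost(kwargs: dict, completion_response):
    input_text = kwargs.get("messages", "")
    if isinstance(input_text, list):
        # Chat-style input: a list of message dicts is passed as messages=
        return litellm.completion_cost(
            completion_response=completion_response, messages=input_text
        )
    elif isinstance(input_text, str):
        # Plain prompt string is passed as prompt=
        return litellm.completion_cost(
            completion_response=completion_response, prompt=input_text
        )
    return None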