fix token counter endpoint

Ishaan Jaff 2024-05-16 10:03:21 -07:00
parent b790d65d28
commit c646b809a6
2 changed files with 6 additions and 3 deletions


@@ -1012,4 +1012,3 @@ class TokenCountRequest(LiteLLMBase):
 class TokenCountResponse(LiteLLMBase):
     total_tokens: int
     model: str
-    custom_llm_provider: str
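
After this hunk, the response schema exposes only the token total and the model name. A minimal sketch of the resulting model for reference, standing in a plain Pydantic BaseModel for the proxy-internal LiteLLMBase base class (an assumption made only to keep the sketch self-contained); the field names are taken from the diff above:

from pydantic import BaseModel


class TokenCountResponse(BaseModel):
    # custom_llm_provider is removed by this commit; only these fields remain
    total_tokens: int
    model: str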


@@ -4767,8 +4767,9 @@ async def moderations(
 
 @router.post(
     "/utils/token_counter",
-    tags=["LLM Utils"],
+    tags=["llm utils"],
     dependencies=[Depends(user_api_key_auth)],
+    response_model=TokenCountResponse,
 )
 async def token_counter(request: TokenCountRequest):
     """ """
@@ -4786,7 +4787,10 @@ async def token_counter(request: TokenCountRequest):
         text=prompt,
         messages=messages,
     )
-    return {"total_tokens": total_tokens}
+    return TokenCountResponse(
+        total_tokens=total_tokens,
+        model=request.model,
+    )
 
 
 #### KEY MANAGEMENT ####
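
With response_model=TokenCountResponse set and the handler returning the typed object, callers now get a structured body instead of a bare dict. A hedged sketch of exercising the endpoint: the path /utils/token_counter and the total_tokens / model response fields come from the diff, while the base URL, API key, and the request payload shape (model plus messages, matching what the handler forwards to the token counter) are assumptions about a typical proxy deployment.

import requests

# Assumed local LiteLLM proxy address and virtual key; adjust for your deployment.
BASE_URL = "http://localhost:4000"
API_KEY = "sk-1234"

response = requests.post(
    f"{BASE_URL}/utils/token_counter",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json={
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "How many tokens is this?"}],
    },
    timeout=30,
)
response.raise_for_status()
body = response.json()
print(body["total_tokens"], body["model"])  # fields defined by TokenCountResponse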