mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 19:24:27 +00:00
(feat) view spend per tag
This commit is contained in:
parent
23589a398a
commit
138f5ceb1e
5 changed files with 171 additions and 0 deletions
31
litellm/proxy/enterprise/callbacks/example_logging_api.py
Normal file
31
litellm/proxy/enterprise/callbacks/example_logging_api.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
# this is an example endpoint to receive data from litellm
|
||||
from fastapi import FastAPI, HTTPException, Request
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
@app.post("/log-event")
async def log_event(request: Request):
    """Example endpoint that receives logging events from litellm.

    Reads the request body as JSON, prints it for demonstration purposes,
    and returns a simple acknowledgement dict.

    Raises:
        HTTPException: 500 if the body cannot be read/parsed or any other
            error occurs while handling the request.
    """
    try:
        print("Received /log-event request")  # noqa
        # Assuming the incoming request has JSON data
        data = await request.json()
        print("Received request data:")  # noqa
        print(data)  # noqa

        # Your additional logic can go here
        # For now, just printing the received data

        return {"message": "Request received successfully"}
    except Exception as e:
        print(f"Error processing request: {str(e)}")  # noqa
        import traceback

        traceback.print_exc()
        # Chain the original exception (`from e`) so the real cause is
        # preserved in tracebacks instead of being masked by the 500.
        raise HTTPException(status_code=500, detail="Internal Server Error") from e
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this example receiver directly as a script.
    import uvicorn

    # Serve the example app on localhost only; not intended for production.
    local_host = "127.0.0.1"
    local_port = 8000
    uvicorn.run(app, host=local_host, port=local_port)
|
Loading…
Add table
Add a link
Reference in a new issue