mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 19:24:27 +00:00
(feat) view spend per tag
This commit is contained in:
parent
23589a398a
commit
138f5ceb1e
5 changed files with 171 additions and 0 deletions
37
litellm/proxy/enterprise/LICENSE.md
Normal file
37
litellm/proxy/enterprise/LICENSE.md
Normal file
|
@@ -0,0 +1,37 @@
|
|||
|
||||
The BerriAI Enterprise license (the "Enterprise License")
|
||||
Copyright (c) 2024 - present Berrie AI Inc.
|
||||
|
||||
With regard to the BerriAI Software:
|
||||
|
||||
This software and associated documentation files (the "Software") may only be
|
||||
used in production, if you (and any entity that you represent) have agreed to,
|
||||
and are in compliance with, the BerriAI Subscription Terms of Service, available
|
||||
via [call](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) or email (info@berri.ai) (the "Enterprise Terms"), or other
|
||||
agreement governing the use of the Software, as agreed by you and BerriAI,
|
||||
and otherwise have a valid BerriAI Enterprise license for the
|
||||
correct number of user seats. Subject to the foregoing sentence, you are free to
|
||||
modify this Software and publish patches to the Software. You agree that BerriAI
|
||||
and/or its licensors (as applicable) retain all right, title and interest in and
|
||||
to all such modifications and/or patches, and all such modifications and/or
|
||||
patches may only be used, copied, modified, displayed, distributed, or otherwise
|
||||
exploited with a valid BerriAI Enterprise license for the correct
|
||||
number of user seats. Notwithstanding the foregoing, you may copy and modify
|
||||
the Software for development and testing purposes, without requiring a
|
||||
subscription. You agree that BerriAI and/or its licensors (as applicable) retain
|
||||
all right, title and interest in and to all such modifications. You are not
|
||||
granted any other rights beyond what is expressly stated herein. Subject to the
|
||||
foregoing, it is forbidden to copy, merge, publish, distribute, sublicense,
|
||||
and/or sell the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
For all third party components incorporated into the BerriAI Software, those
|
||||
components are licensed under the original license provided by the owner of the
|
||||
applicable component.
|
12
litellm/proxy/enterprise/README.md
Normal file
12
litellm/proxy/enterprise/README.md
Normal file
|
@@ -0,0 +1,12 @@
|
|||
## LiteLLM Enterprise
|
||||
|
||||
Code in this folder is licensed under a commercial license. Please review the [LICENSE](./LICENSE.md) file within the /enterprise folder
|
||||
|
||||
**These features are covered under the LiteLLM Enterprise contract**
|
||||
|
||||
👉 **Using in an Enterprise / Need specific features?** Meet with us [here](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat?month=2024-02)
|
||||
|
||||
## Features:
|
||||
- Custom API / microservice callbacks
|
||||
- Google Text Moderation API
|
||||
|
31
litellm/proxy/enterprise/callbacks/example_logging_api.py
Normal file
31
litellm/proxy/enterprise/callbacks/example_logging_api.py
Normal file
|
@@ -0,0 +1,31 @@
|
|||
# this is an example endpoint to receive data from litellm
|
||||
from fastapi import FastAPI, HTTPException, Request
|
||||
|
||||
app = FastAPI()


@app.post("/log-event")
async def log_event(request: Request):
    """Example receiver: echo a LiteLLM logging payload to stdout and ack it."""
    try:
        print("Received /log-event request")  # noqa
        # Assuming the incoming request has JSON data
        payload = await request.json()
        print("Received request data:")  # noqa
        print(payload)  # noqa

        # Real processing logic would go here; this demo only prints the body.
        return {"message": "Request received successfully"}
    except Exception as e:
        # Surface the failure in the server log, then map it to a plain 500.
        print(f"Error processing request: {str(e)}")  # noqa
        import traceback

        traceback.print_exc()
        raise HTTPException(status_code=500, detail="Internal Server Error")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Launch the example receiver on localhost for manual testing.
    import uvicorn

    uvicorn.run(app=app, host="127.0.0.1", port=8000)
|
16
litellm/proxy/enterprise/utils.py
Normal file
16
litellm/proxy/enterprise/utils.py
Normal file
|
@@ -0,0 +1,16 @@
|
|||
# Enterprise Proxy Util Endpoints
|
||||
|
||||
|
||||
async def get_spend_by_tags(start_date=None, end_date=None, prisma_client=None):
    """
    Aggregate total spend and request count per request tag.

    Fix: the original accepted ``start_date``/``end_date`` but silently ignored
    them — the query always aggregated over ALL spend logs. When both bounds
    are supplied they are now applied as a parameterized date filter; with an
    incomplete range the original all-time behavior is preserved.

    Args:
        start_date: inclusive lower bound (ISO date string), or None.
        end_date: inclusive upper bound (ISO date string), or None.
        prisma_client: connected Prisma client; required.

    Returns:
        Raw query rows with ``individual_request_tag``, ``log_count`` and
        ``total_spend`` columns.

    Raises:
        ValueError: if no prisma_client is supplied.
    """
    if prisma_client is None:
        # Fail loudly instead of the opaque AttributeError on `.db` that the
        # original produced when called without a client.
        raise ValueError("get_spend_by_tags requires a connected prisma_client")

    if start_date is not None and end_date is not None:
        # NOTE(review): assumes spend logs carry a "startTime" timestamp
        # column — confirm against the Prisma schema before relying on this.
        response = await prisma_client.db.query_raw(
            """
            SELECT
            jsonb_array_elements_text(request_tags) AS individual_request_tag,
            COUNT(*) AS log_count,
            SUM(spend) AS total_spend
            FROM "LiteLLM_SpendLogs"
            WHERE "startTime" BETWEEN $1::date AND $2::date
            GROUP BY individual_request_tag;
            """,
            start_date,
            end_date,
        )
    else:
        # No complete date range supplied: aggregate over all logs
        # (the original behavior).
        response = await prisma_client.db.query_raw(
            """
            SELECT
            jsonb_array_elements_text(request_tags) AS individual_request_tag,
            COUNT(*) AS log_count,
            SUM(spend) AS total_spend
            FROM "LiteLLM_SpendLogs"
            GROUP BY individual_request_tag;
            """
        )

    return response
|
|
@@ -3482,6 +3482,81 @@ async def spend_user_fn(
|
|||
)
|
||||
|
||||
|
||||
@router.get(
    "/spend/tags",
    tags=["budget & spend Tracking"],
    dependencies=[Depends(user_api_key_auth)],
    responses={
        200: {"model": List[LiteLLM_SpendLogs]},
    },
)
async def view_spend_tags(
    start_date: Optional[str] = fastapi.Query(
        default=None,
        description="Time from which to start viewing key spend",
    ),
    end_date: Optional[str] = fastapi.Query(
        default=None,
        description="Time till which to view key spend",
    ),
):
    """
    LiteLLM Enterprise - View Spend Per Request Tag

    Example Request:
    ```
    curl -X GET "http://0.0.0.0:8000/spend/tags" \
    -H "Authorization: Bearer sk-1234"
    ```

    Spend with Start Date and End Date
    ```
    curl -X GET "http://0.0.0.0:8000/spend/tags?start_date=2022-01-01&end_date=2022-02-01" \
    -H "Authorization: Bearer sk-1234"
    ```
    """

    # Imported lazily inside the handler — presumably so the enterprise
    # module is only loaded when this enterprise-licensed endpoint is
    # actually hit; confirm intent with the module owners.
    from litellm.proxy.enterprise.utils import get_spend_by_tags

    # prisma_client is a module-level handle; None means no DB was configured.
    global prisma_client
    try:
        if prisma_client is None:
            # Surface a setup hint; the handler below wraps this into a
            # ProxyException for the caller.
            raise Exception(
                f"Database not connected. Connect a database to your proxy - https://docs.litellm.ai/docs/simple_proxy#managing-auth---virtual-keys"
            )

        # run the following SQL query on prisma
        # (illustrative only — the actual query is executed inside
        # get_spend_by_tags)
        """
        SELECT
        jsonb_array_elements_text(request_tags) AS individual_request_tag,
        COUNT(*) AS log_count,
        SUM(spend) AS total_spend
        FROM "LiteLLM_SpendLogs"
        GROUP BY individual_request_tag;
        """
        response = await get_spend_by_tags(
            start_date=start_date, end_date=end_date, prisma_client=prisma_client
        )

        return response
    except Exception as e:
        # Normalize every failure to the proxy's ProxyException shape,
        # preserving status/detail from FastAPI HTTPExceptions.
        if isinstance(e, HTTPException):
            raise ProxyException(
                message=getattr(e, "detail", f"/spend/tags Error({str(e)})"),
                type="internal_error",
                param=getattr(e, "param", "None"),
                code=getattr(e, "status_code", status.HTTP_500_INTERNAL_SERVER_ERROR),
            )
        elif isinstance(e, ProxyException):
            # Already in the canonical shape — re-raise untouched.
            raise e
        # Anything else becomes a generic 500 ProxyException.
        raise ProxyException(
            message="/spend/tags Error" + str(e),
            type="internal_error",
            param=getattr(e, "param", "None"),
            code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
|
||||
|
||||
|
||||
@router.get(
|
||||
"/spend/logs",
|
||||
tags=["budget & spend Tracking"],
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue