Commit f2a0156e88 - Merge branch 'main' of github.com:lunary-ai/litellm
(forked from phoenix/litellm-mirror)

61 changed files with 1523 additions and 312 deletions
.github/workflows/ghcr_deploy.yml (vendored) - 26 changes
@@ -147,13 +147,19 @@ jobs:
             core.setFailed(error.message);
           }
       - name: Github Releases To Discord
-        uses: SethCohen/github-releases-to-discord@v1.13.1
-        with:
-          webhook_url: ${{ secrets.WEBHOOK_URL }}
-          color: "2105893"
-          username: "Release Changelog"
-          avatar_url: "https://cdn.discordapp.com/avatars/487431320314576937/bd64361e4ba6313d561d54e78c9e7171.png"
-          content: "||@everyone||"
-          footer_title: "Changelog"
-          footer_icon_url: "https://cdn.discordapp.com/avatars/487431320314576937/bd64361e4ba6313d561d54e78c9e7171.png"
-          footer_timestamp: true
+        env:
+          WEBHOOK_URL: ${{ secrets.WEBHOOK_URL }}
+        run: |
+          curl -H "Content-Type: application/json" -X POST -d '{
+            "content": "||@everyone||",
+            "username": "Release Changelog",
+            "avatar_url": "https://cdn.discordapp.com/avatars/487431320314576937/bd64361e4ba6313d561d54e78c9e7171.png",
+            "embeds": [
+              {
+                "title": "Changelog",
+                "description": "This is the changelog for the latest release.",
+                "color": 2105893
+              }
+            ]
+          }' $WEBHOOK_URL
@@ -1,15 +1,29 @@
 # Enterprise
+For companies that need better security, user management and professional support
-LiteLLM offers dedicated enterprise support.
 
-This covers:
-- **Feature Prioritization**
-- **Custom Integrations**
-- **Professional Support - Dedicated discord + slack**
-- **Custom SLAs**
 
 :::info
 
 [Talk to founders](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat)
 
 :::
 
+This covers:
+- ✅ **Features under the [LiteLLM Commercial License](https://docs.litellm.ai/docs/proxy/enterprise):**
+- ✅ **Feature Prioritization**
+- ✅ **Custom Integrations**
+- ✅ **Professional Support - Dedicated discord + slack**
+- ✅ **Custom SLAs**
+- ✅ **Secure access with Single Sign-On**
+
+
+## Frequently Asked Questions
+
+### What topics does Professional support cover and what SLAs do you offer?
+
+Professional Support can assist with LLM/Provider integrations, deployment, upgrade management, and LLM Provider troubleshooting. We can’t solve your own infrastructure-related issues but we will guide you to fix them.
+
+We offer custom SLAs based on your needs and the severity of the issue. The standard SLA is 6 hours for Sev0-Sev1 severity and 24h for Sev2-Sev3 between 7am – 7pm PT (Monday through Saturday).
+
+### What’s the cost of the Self-Managed Enterprise edition?
+
+Self-Managed Enterprise deployments require our team to understand your exact needs. [Get in touch with us to learn more](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat)
@@ -2,16 +2,36 @@
 
 ## Using Mistral models deployed on Azure AI Studio
 
-**Ensure you have the `/v1` in your api_base**
-### Sample Usage
+### Sample Usage - setting env vars
+
+Set `MISTRAL_AZURE_API_KEY` and `MISTRAL_AZURE_API_BASE` in your env
+
+```shell
+MISTRAL_AZURE_API_KEY = "zE************""
+MISTRAL_AZURE_API_BASE = "https://Mistral-large-nmefg-serverless.eastus2.inference.ai.azure.com"
+```
+
 ```python
 from litellm import completion
 import os
 
 response = completion(
     model="mistral/Mistral-large-dfgfj",
-    api_base="https://Mistral-large-dfgfj-serverless.eastus2.inference.ai.azure.com/v1",
+    messages=[
+        {"role": "user", "content": "hello from litellm"}
+    ],
+)
+print(response)
+```
+
+### Sample Usage - passing `api_base` and `api_key` to `litellm.completion`
+```python
+from litellm import completion
+import os
+
+response = completion(
+    model="mistral/Mistral-large-dfgfj",
+    api_base="https://Mistral-large-dfgfj-serverless.eastus2.inference.ai.azure.com",
     api_key = "JGbKodRcTp****"
     messages=[
         {"role": "user", "content": "hello from litellm"}
@@ -23,14 +43,12 @@ print(response)
 
 ### [LiteLLM Proxy] Using Mistral Models
 
 Set this on your litellm proxy config.yaml
 
-**Ensure you have the `/v1` in your api_base**
 ```yaml
 model_list:
   - model_name: mistral
     litellm_params:
       model: mistral/Mistral-large-dfgfj
-      api_base: https://Mistral-large-dfgfj-serverless.eastus2.inference.ai.azure.com/v1
+      api_base: https://Mistral-large-dfgfj-serverless.eastus2.inference.ai.azure.com
       api_key: JGbKodRcTp****
 ```
@@ -1,6 +1,8 @@
 # Groq
 https://groq.com/
 
+**We support ALL Groq models, just set `groq/` as a prefix when sending completion requests**
+
 ## API Key
 ```python
 # env variable
@@ -47,3 +49,4 @@ We support ALL Groq models, just set `groq/` as a prefix when sending completion
 | Model Name | Function Call |
 |--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------|
 | llama2-70b-4096 | `completion(model="groq/llama2-70b-4096", messages)` |
+| mixtral-8x7b-32768 | `completion(model="groq/mixtral-8x7b-32768", messages)` |
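A runnable version of the table's call pattern may help; this is a minimal sketch that assumes `GROQ_API_KEY` is the environment variable litellm reads for Groq (per the API Key section above) and uses a placeholder key:

```python
import os
from litellm import completion

os.environ["GROQ_API_KEY"] = "gsk-placeholder"  # normally set outside the script

response = completion(
    model="groq/mixtral-8x7b-32768",  # the groq/ prefix routes the request to Groq
    messages=[{"role": "user", "content": "hello from litellm"}],
)
print(response)
```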
@@ -152,8 +152,14 @@ LiteLLM Supports the following image types passed in `url`
 - Images with Cloud Storage URIs - gs://cloud-samples-data/generative-ai/image/boats.jpeg
 - Images with direct links - https://storage.googleapis.com/github-repo/img/gemini/intro/landmark3.jpg
 - Videos with Cloud Storage URIs - https://storage.googleapis.com/github-repo/img/gemini/multimodality_usecases_overview/pixel8.mp4
+- Base64 Encoded Local Images
 
-**Example Request**
+**Example Request - image url**
+
+<Tabs>
+
+<TabItem value="direct" label="Images with direct links">
+
 ```python
 import litellm
 
@@ -179,6 +185,43 @@ response = litellm.completion(
 )
 print(response)
 ```
+</TabItem>
+
+<TabItem value="base" label="Local Base64 Images">
+
+```python
+import litellm
+
+def encode_image(image_path):
+    import base64
+
+    with open(image_path, "rb") as image_file:
+        return base64.b64encode(image_file.read()).decode("utf-8")
+
+image_path = "cached_logo.jpg"
+# Getting the base64 string
+base64_image = encode_image(image_path)
+response = litellm.completion(
+    model="vertex_ai/gemini-pro-vision",
+    messages=[
+        {
+            "role": "user",
+            "content": [
+                {"type": "text", "text": "Whats in this image?"},
+                {
+                    "type": "image_url",
+                    "image_url": {
+                        "url": "data:image/jpeg;base64," + base64_image
+                    },
+                },
+            ],
+        }
+    ],
+)
+print(response)
+```
+</TabItem>
+</Tabs>
+
+
 ## Chat Models
@@ -1,4 +1,4 @@
-# Multiple Instances of 1 model
+# Load Balancing - Config Setup
 Load balance multiple instances of the same model
 
 The proxy will handle routing requests (using LiteLLM's Router). **Set `rpm` in the config if you want maximize throughput**
@@ -79,6 +79,32 @@ curl --location 'http://0.0.0.0:8000/chat/completions' \
 '
 ```
 
+## Load Balancing using multiple litellm instances (Kubernetes, Auto Scaling)
+
+LiteLLM Proxy supports sharing rpm/tpm shared across multiple litellm instances, pass `redis_host`, `redis_password` and `redis_port` to enable this. (LiteLLM will use Redis to track rpm/tpm usage )
+
+Example config
+
+```yaml
+model_list:
+  - model_name: gpt-3.5-turbo
+    litellm_params:
+      model: azure/<your-deployment-name>
+      api_base: <your-azure-endpoint>
+      api_key: <your-azure-api-key>
+      rpm: 6      # Rate limit for this deployment: in requests per minute (rpm)
+  - model_name: gpt-3.5-turbo
+    litellm_params:
+      model: azure/gpt-turbo-small-ca
+      api_base: https://my-endpoint-canada-berri992.openai.azure.com/
+      api_key: <your-azure-api-key>
+      rpm: 6
+router_settings:
+  redis_host: <your redis host>
+  redis_password: <your redis password>
+  redis_port: 1992
+```
+
 ## Router settings on config - routing_strategy, model_group_alias
 
 litellm.Router() settings can be set under `router_settings`. You can set `model_group_alias`, `routing_strategy`, `num_retries`,`timeout` . See all Router supported params [here](https://github.com/BerriAI/litellm/blob/1b942568897a48f014fa44618ec3ce54d7570a46/litellm/router.py#L64)
@@ -103,4 +129,7 @@ router_settings:
   routing_strategy: least-busy # Literal["simple-shuffle", "least-busy", "usage-based-routing", "latency-based-routing"]
   num_retries: 2
   timeout: 30 # 30 seconds
+  redis_host: <your redis host>
+  redis_password: <your redis password>
+  redis_port: 1992
 ```
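The `router_settings` block above maps onto `litellm.Router()` keyword arguments, so a rough sketch of the equivalent in-code setup looks like the following. All values are placeholders copied from the example config; this is illustrative, not the proxy's exact construction:

```python
from litellm import Router

router = Router(
    model_list=[
        {
            "model_name": "gpt-3.5-turbo",
            "litellm_params": {
                "model": "azure/gpt-turbo-small-ca",
                "api_base": "https://my-endpoint-canada-berri992.openai.azure.com/",
                "api_key": "<your-azure-api-key>",
                "rpm": 6,
            },
        },
    ],
    # shared Redis lets several litellm instances count rpm/tpm against the same limits
    redis_host="<your redis host>",
    redis_password="<your redis password>",
    redis_port=1992,
)
```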
@@ -2,7 +2,7 @@ import Image from '@theme/IdealImage';
 import Tabs from '@theme/Tabs';
 import TabItem from '@theme/TabItem';
 
-# 🔑 [BETA] Proxy UI
+# [BETA] Proxy UI
 ### **Create + delete keys through a UI**
 
 [Let users create their own keys](#setup-ssoauth-for-ui)
@@ -1,4 +1,4 @@
-# Virtual Keys, Users
+# 🔑 Virtual Keys, Users
 Track Spend, Set budgets and create virtual keys for the proxy
 
 Grant other's temporary access to your proxy, with keys that expire after a set duration.
@@ -343,18 +343,83 @@ A key will be generated for the new user created
 "key_name": null,
 "expires": null
 }
 
 ```
 
-Request Params:
-- keys: List[str] - List of keys to delete
+## /user/info
+
+### Request
+
+#### View all Users
+If you're trying to view all users, we recommend using pagination with the following args
+- `view_all=true`
+- `page=0` Optional(int) min = 0, default=0
+- `page_size=25` Optional(int) min = 1, default = 25
+```shell
+curl -X GET "http://0.0.0.0:4000/user/info?view_all=true&page=0&page_size=25" -H "Authorization: Bearer sk-1234"
+```
+
+#### View specific user_id
+```shell
+curl -X GET "http://0.0.0.0:4000/user/info?user_id=228da235-eef0-4c30-bf53-5d6ac0d278c2" -H "Authorization: Bearer sk-1234"
+```
+
 ### Response
+View user spend, budget, models, keys and teams
+
 ```json
 {
-"deleted_keys": ["sk-kdEXbIqZRwEeEiHwdg7sFA"]
+  "user_id": "228da235-eef0-4c30-bf53-5d6ac0d278c2",
+  "user_info": {
+    "user_id": "228da235-eef0-4c30-bf53-5d6ac0d278c2",
+    "team_id": null,
+    "teams": [],
+    "user_role": "app_user",
+    "max_budget": null,
+    "spend": 200000.0,
+    "user_email": null,
+    "models": [],
+    "max_parallel_requests": null,
+    "tpm_limit": null,
+    "rpm_limit": null,
+    "budget_duration": null,
+    "budget_reset_at": null,
+    "allowed_cache_controls": [],
+    "model_spend": {
+      "chatgpt-v-2": 200000
+    },
+    "model_max_budget": {}
+  },
+  "keys": [
+    {
+      "token": "16c337f9df00a0e6472627e39a2ed02e67bc9a8a760c983c4e9b8cad7954f3c0",
+      "key_name": null,
+      "key_alias": null,
+      "spend": 200000.0,
+      "expires": null,
+      "models": [],
+      "aliases": {},
+      "config": {},
+      "user_id": "228da235-eef0-4c30-bf53-5d6ac0d278c2",
+      "team_id": null,
+      "permissions": {},
+      "max_parallel_requests": null,
+      "metadata": {},
+      "tpm_limit": null,
+      "rpm_limit": null,
+      "max_budget": null,
+      "budget_duration": null,
+      "budget_reset_at": null,
+      "allowed_cache_controls": [],
+      "model_spend": {
+        "chatgpt-v-2": 200000
+      },
+      "model_max_budget": {}
+    }
+  ],
+  "teams": []
 }
 
 ```
 
 ## Advanced
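For completeness, the same paged request from Python; a small sketch that assumes the local proxy and admin key used in the curl examples above:

```python
import requests

resp = requests.get(
    "http://0.0.0.0:4000/user/info",
    params={"view_all": "true", "page": 0, "page_size": 25},
    headers={"Authorization": "Bearer sk-1234"},
)
resp.raise_for_status()
print(resp.json())  # one page of users; increment `page` to walk the full list
```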
@@ -123,6 +123,8 @@ const sidebars = {
         "providers/aws_sagemaker",
         "providers/bedrock",
         "providers/anyscale",
+        "providers/huggingface",
+        "providers/ollama",
         "providers/perplexity",
         "providers/groq",
         "providers/vllm",
@@ -245,3 +245,103 @@ def _create_clickhouse_aggregate_tables(client=None, table_names=[]):
             """
         )
         return
+
+
+def _forecast_daily_cost(data: list):
+    import requests
+    from datetime import datetime, timedelta
+
+    first_entry = data[0]
+    last_entry = data[-1]
+
+    # get the date today
+    today_date = datetime.today().date()
+
+    today_day_month = today_date.month
+
+    # Parse the date from the first entry
+    first_entry_date = datetime.strptime(first_entry["date"], "%Y-%m-%d").date()
+    last_entry_date = datetime.strptime(last_entry["date"], "%Y-%m-%d")
+
+    print("last entry date", last_entry_date)
+
+    # Assuming today_date is a datetime object
+    today_date = datetime.now()
+
+    # Calculate the last day of the month
+    last_day_of_todays_month = datetime(
+        today_date.year, today_date.month % 12 + 1, 1
+    ) - timedelta(days=1)
+
+    # Calculate the remaining days in the month
+    remaining_days = (last_day_of_todays_month - last_entry_date).days
+
+    current_spend_this_month = 0
+    series = {}
+    for entry in data:
+        date = entry["date"]
+        spend = entry["spend"]
+        series[date] = spend
+
+        # check if the date is in this month
+        if datetime.strptime(date, "%Y-%m-%d").month == today_day_month:
+            current_spend_this_month += spend
+
+    if len(series) < 10:
+        num_items_to_fill = 11 - len(series)
+
+        # avg spend for all days in series
+        avg_spend = sum(series.values()) / len(series)
+        for i in range(num_items_to_fill):
+            # go backwards from the first entry
+            date = first_entry_date - timedelta(days=i)
+            series[date.strftime("%Y-%m-%d")] = avg_spend
+            series[date.strftime("%Y-%m-%d")] = avg_spend
+
+    payload = {"series": series, "count": remaining_days}
+    print("Prediction Data:", payload)
+
+    headers = {
+        "Content-Type": "application/json",
+    }
+
+    response = requests.post(
+        url="https://trend-api-production.up.railway.app/forecast",
+        json=payload,
+        headers=headers,
+    )
+    # check the status code
+    response.raise_for_status()
+
+    json_response = response.json()
+    forecast_data = json_response["forecast"]
+
+    # print("Forecast Data:", forecast_data)
+
+    response_data = []
+    total_predicted_spend = current_spend_this_month
+    for date in forecast_data:
+        spend = forecast_data[date]
+        entry = {
+            "date": date,
+            "predicted_spend": spend,
+        }
+        total_predicted_spend += spend
+        response_data.append(entry)
+
+    # get month as a string, Jan, Feb, etc.
+    today_month = today_date.strftime("%B")
+    predicted_spend = (
+        f"Predicted Spend for { today_month } 2024, ${total_predicted_spend}"
+    )
+    return {"response": response_data, "predicted_spend": predicted_spend}
+
+    # print(f"Date: {entry['date']}, Spend: {entry['spend']}, Response: {response.text}")
+
+
+# _forecast_daily_cost(
+#     [
+#         {"date": "2022-01-01", "spend": 100},
+
+#     ]
+# )
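Going by the commented-out call at the bottom of the new helper and its return statement, usage looks roughly like this; the spend values are made up, and the forecast numbers come from the external trend API the function posts to:

```python
# hypothetical input; each entry mirrors the {"date", "spend"} shape in the diff
daily_spend = [
    {"date": "2024-03-01", "spend": 12.5},
    {"date": "2024-03-02", "spend": 14.0},
]
result = _forecast_daily_cost(daily_spend)

# per the return statement, `result` has the shape:
# {
#     "response": [{"date": <day>, "predicted_spend": <float>}, ...],
#     "predicted_spend": "Predicted Spend for March 2024, $<total>",
# }
```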
@@ -68,9 +68,9 @@ class OllamaConfig:
     repeat_last_n: Optional[int] = None
     repeat_penalty: Optional[float] = None
     temperature: Optional[float] = None
-    stop: Optional[
-        list
-    ] = None  # stop is a list based on this - https://github.com/jmorganca/ollama/pull/442
+    stop: Optional[list] = (
+        None  # stop is a list based on this - https://github.com/jmorganca/ollama/pull/442
+    )
     tfs_z: Optional[float] = None
     num_predict: Optional[int] = None
     top_k: Optional[int] = None
@@ -147,6 +147,11 @@ def get_ollama_response(
 
     stream = optional_params.pop("stream", False)
     format = optional_params.pop("format", None)
+
+    for m in messages:
+        if "role" in m and m["role"] == "tool":
+            m["role"] = "assistant"
+
     data = {
         "model": model,
         "messages": messages,
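The new loop in `get_ollama_response` relabels `tool` messages before the request is sent, presumably because Ollama's chat endpoint only understands system/user/assistant roles. A standalone illustration of the remap:

```python
# illustrative messages; the loop is copied from the diff above
messages = [
    {"role": "user", "content": "What's the weather in SF?"},
    {"role": "tool", "content": "72F and sunny"},
]

for m in messages:
    if "role" in m and m["role"] == "tool":
        m["role"] = "assistant"

print(messages)
# [{'role': 'user', 'content': "What's the weather in SF?"},
#  {'role': 'assistant', 'content': '72F and sunny'}]
```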
@@ -334,10 +334,14 @@ class OpenAIChatCompletion(BaseLLM):
                     model_response_object=model_response,
                 )
             except Exception as e:
+                if print_verbose is not None:
+                    print_verbose(f"openai.py: Received openai error - {str(e)}")
                 if (
                     "Conversation roles must alternate user/assistant" in str(e)
                     or "user and assistant roles should be alternating" in str(e)
                 ) and messages is not None:
+                    if print_verbose is not None:
+                        print_verbose("openai.py: REFORMATS THE MESSAGE!")
                     # reformat messages to ensure user/assistant are alternating, if there's either 2 consecutive 'user' messages or 2 consecutive 'assistant' message, add a blank 'user' or 'assistant' message to ensure compatibility
                     new_messages = []
                     for i in range(len(messages) - 1):  # type: ignore
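The loop body that does the reformatting falls outside this hunk. A minimal sketch of the behaviour the comment describes (insert a blank message of the opposite role wherever two consecutive messages share a role) could look like this; it assumes plain role/content dicts and is not the exact implementation in openai.py:

```python
def alternate_roles(messages):
    """Insert blank opposite-role messages so user/assistant strictly alternate."""
    new_messages = []
    for i, msg in enumerate(messages):
        new_messages.append(msg)
        if i + 1 < len(messages) and messages[i + 1]["role"] == msg["role"]:
            opposite = "assistant" if msg["role"] == "user" else "user"
            new_messages.append({"role": opposite, "content": ""})
    return new_messages

print(alternate_roles([
    {"role": "user", "content": "hi"},
    {"role": "user", "content": "are you there?"},
]))
# user, blank assistant, user
```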
@@ -225,6 +225,24 @@ def _gemini_vision_convert_messages(messages: list):
                 part_mime = "video/mp4"
                 google_clooud_part = Part.from_uri(img, mime_type=part_mime)
                 processed_images.append(google_clooud_part)
+            elif "base64" in img:
+                # Case 4: Images with base64 encoding
+                import base64, re
+
+                # base 64 is passed as data:image/jpeg;base64,<base-64-encoded-image>
+                image_metadata, img_without_base_64 = img.split(",")
+
+                # read mime_type from img_without_base_64=data:image/jpeg;base64
+                # Extract MIME type using regular expression
+                mime_type_match = re.match(r"data:(.*?);base64", image_metadata)
+
+                if mime_type_match:
+                    mime_type = mime_type_match.group(1)
+                else:
+                    mime_type = "image/jpeg"
+                decoded_img = base64.b64decode(img_without_base_64)
+                processed_image = Part.from_data(data=decoded_img, mime_type=mime_type)
+                processed_images.append(processed_image)
         return prompt, processed_images
     except Exception as e:
         raise e
@@ -282,7 +300,9 @@ def completion(
             f"VERTEX AI: vertex_project={vertex_project}; vertex_location={vertex_location}"
         )
         creds, _ = google.auth.default(quota_project_id=vertex_project)
-        print_verbose(f"VERTEX AI: creds={creds}")
+        print_verbose(
+            f"VERTEX AI: creds={creds}; google application credentials: {os.getenv('GOOGLE_APPLICATION_CREDENTIALS')}"
+        )
         vertexai.init(
             project=vertex_project, location=vertex_location, credentials=creds
         )
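To see what the new base64 branch extracts, here is a self-contained rerun of the same parsing on a made-up data URL (the payload bytes are fake):

```python
import base64, re

img = "data:image/png;base64," + base64.b64encode(b"fake png bytes").decode()

image_metadata, img_without_base_64 = img.split(",")
mime_type_match = re.match(r"data:(.*?);base64", image_metadata)
mime_type = mime_type_match.group(1) if mime_type_match else "image/jpeg"

print(mime_type)                               # image/png
print(base64.b64decode(img_without_base_64))   # b'fake png bytes'
```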
@@ -424,6 +424,22 @@
         "mode": "chat",
         "supports_function_calling": true
     },
+    "azure/mistral-large-latest": {
+        "max_tokens": 32000,
+        "input_cost_per_token": 0.000008,
+        "output_cost_per_token": 0.000024,
+        "litellm_provider": "azure",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
+    "azure/mistral-large-2402": {
+        "max_tokens": 32000,
+        "input_cost_per_token": 0.000008,
+        "output_cost_per_token": 0.000024,
+        "litellm_provider": "azure",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
     "azure/ada": {
         "max_tokens": 8191,
         "input_cost_per_token": 0.0000001,
@@ -564,6 +580,20 @@
         "litellm_provider": "mistral",
         "mode": "embedding"
     },
+    "groq/llama2-70b-4096": {
+        "max_tokens": 4096,
+        "input_cost_per_token": 0.00000070,
+        "output_cost_per_token": 0.00000080,
+        "litellm_provider": "groq",
+        "mode": "chat"
+    },
+    "groq/mixtral-8x7b-32768": {
+        "max_tokens": 32768,
+        "input_cost_per_token": 0.00000027,
+        "output_cost_per_token": 0.00000027,
+        "litellm_provider": "groq",
+        "mode": "chat"
+    },
     "claude-instant-1.2": {
         "max_tokens": 100000,
         "max_output_tokens": 8191,
@@ -2040,6 +2070,36 @@
         "output_cost_per_token": 0.00000028,
         "litellm_provider": "perplexity",
         "mode": "chat"
+    },
+    "perplexity/sonar-small-chat": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.00000007,
+        "output_cost_per_token": 0.00000028,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-small-online": {
+        "max_tokens": 12000,
+        "input_cost_per_token": 0,
+        "output_cost_per_token": 0.00000028,
+        "input_cost_per_request": 0.005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-medium-chat": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.0000006,
+        "output_cost_per_token": 0.0000018,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-medium-online": {
+        "max_tokens": 12000,
+        "input_cost_per_token": 0,
+        "output_cost_per_token": 0.0000018,
+        "input_cost_per_request": 0.005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
     },
     "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
         "max_tokens": 16384,
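As a sanity check on the new pricing entries: the per-token fields multiply directly against token counts, and the `input_cost_per_request` field on the `online` Perplexity models is a flat per-call fee. A standalone worked example (token counts are made up; this mirrors, but is not, litellm's own cost function):

```python
prices = {
    "azure/mistral-large-2402": {
        "input_cost_per_token": 0.000008,
        "output_cost_per_token": 0.000024,
    },
    "perplexity/sonar-medium-online": {
        "input_cost_per_token": 0,
        "output_cost_per_token": 0.0000018,
        "input_cost_per_request": 0.005,
    },
}

def estimate_cost(model, prompt_tokens, completion_tokens):
    p = prices[model]
    cost = prompt_tokens * p["input_cost_per_token"]
    cost += completion_tokens * p["output_cost_per_token"]
    cost += p.get("input_cost_per_request", 0)  # flat per-call fee, if any
    return cost

print(estimate_cost("azure/mistral-large-2402", 1000, 500))        # 0.008 + 0.012 = 0.02
print(estimate_cost("perplexity/sonar-medium-online", 1000, 500))  # 0.005 + 0.0009 = 0.0059
```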
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
(minified Next.js page chunk; rebuilt with module id 11837 changed to 87421, otherwise unchanged)
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
(minified Next.js chunk; rebuilt with module id 70377 changed to 32028, otherwise unchanged)
@@ -1 +1 @@
(minified webpack runtime; the referenced stylesheet changes from static/css/a40ad0909dd7838e.css to static/css/16eb955147cb6b2f.css, otherwise unchanged)
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
(prerendered ui/index.html; rebuilt with new asset hashes: webpack-12184ee6a95c1363.js to webpack-6b93c4e1d000ff14.js, main-app-096338c8e1915716.js to main-app-9b4fb13a7db53edf.js, CSS a40ad0909dd7838e.css to 16eb955147cb6b2f.css, page chunk 303-d80f23087a9e6aec.js to 730-1411b729a1c79695.js, buildId kyOCJPBB9pyUfbMKCAXr- to SP1Cm97dc_3zo4HlsJJjg)
@@ -1,7 +1,7 @@
 2:I[77831,[],""]
-3:I[30280,["303","static/chunks/303-d80f23087a9e6aec.js","931","static/chunks/app/page-8f65fc157f538dff.js"],""]
+3:I[9125,["730","static/chunks/730-1411b729a1c79695.js","931","static/chunks/app/page-ad3e13d2fec661b5.js"],""]
 4:I[5613,[],""]
 5:I[31778,[],""]
-0:["kyOCJPBB9pyUfbMKCAXr-", ...]   (long RSC flight payload; differs from the new line only in the buildId and the stylesheet href a40ad0909dd7838e.css)
+0:["SP1Cm97dc_3zo4HlsJJjg", ...]   (same payload, with stylesheet href 16eb955147cb6b2f.css)
 6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"🚅 LiteLLM"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
 1:null
@@ -262,7 +262,19 @@ class NewTeamRequest(LiteLLMBase):
 
 class TeamMemberAddRequest(LiteLLMBase):
     team_id: str
-    member: Optional[Member] = None
+    member: Member
+
+
+class TeamMemberDeleteRequest(LiteLLMBase):
+    team_id: str
+    user_id: Optional[str] = None
+    user_email: Optional[str] = None
+
+    @root_validator(pre=True)
+    def check_user_info(cls, values):
+        if values.get("user_id") is None and values.get("user_email") is None:
+            raise ValueError("Either user id or user email must be provided")
+        return values
 
 
 class UpdateTeamRequest(LiteLLMBase):
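The new `TeamMemberDeleteRequest` enforces "user_id or user_email" with a pydantic v1-style root validator. A standalone sketch of how that behaves for callers, using plain `BaseModel` in place of `LiteLLMBase` (which is not shown in this diff):

```python
from typing import Optional
from pydantic import BaseModel, ValidationError, root_validator

class TeamMemberDeleteRequest(BaseModel):  # stand-in for the LiteLLMBase subclass
    team_id: str
    user_id: Optional[str] = None
    user_email: Optional[str] = None

    @root_validator(pre=True)
    def check_user_info(cls, values):
        if values.get("user_id") is None and values.get("user_email") is None:
            raise ValueError("Either user id or user email must be provided")
        return values

print(TeamMemberDeleteRequest(team_id="team-1", user_email="a@example.com"))  # accepted

try:
    TeamMemberDeleteRequest(team_id="team-1")  # neither user_id nor user_email
except ValidationError as err:
    print(err)
```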
@@ -783,6 +783,10 @@ async def user_api_key_auth(
             "/v2/key/info",
             "/models",
             "/v1/models",
+            "/global/spend/logs",
+            "/global/spend/keys",
+            "/global/spend/models",
+            "/global/predict/spend/logs",
         ]
         # check if the current route startswith any of the allowed routes
         if (
@@ -941,8 +945,7 @@ async def _PROXY_track_cost_callback(
                 raise Exception("User API key missing from custom callback.")
         else:
             if kwargs["stream"] != True or (
-                kwargs["stream"] == True
-                and kwargs.get("complete_streaming_response") in kwargs
+                kwargs["stream"] == True and "complete_streaming_response" in kwargs
             ):
                 raise Exception(
                     f"Model not in litellm model cost map. Add custom pricing - https://docs.litellm.ai/docs/proxy/custom_pricing"
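The hunk above fixes a membership test: the old expression looked up the value of `complete_streaming_response` and then asked whether that value was itself a key of `kwargs` (which would even raise `TypeError` for an unhashable response object), while the new expression checks for the key directly. A small illustration with a hashable stand-in value:

```python
kwargs = {"stream": True, "complete_streaming_response": "assembled final response"}

# old check: fetches the value, then tests that value for key membership
print(kwargs.get("complete_streaming_response") in kwargs)   # False

# new check: plain key membership
print("complete_streaming_response" in kwargs)               # True
```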
@@ -1150,9 +1153,9 @@ async def update_database(
 (whitespace-only change)
             payload["spend"] = response_cost
             if prisma_client is not None:
                 await prisma_client.insert_data(data=payload, table_name="spend")
 
             elif custom_db_client is not None:
                 await custom_db_client.insert_data(payload, table_name="spend")
 
         except Exception as e:
             verbose_proxy_logger.info(f"Update Spend Logs DB failed to execute")
 
@@ -2407,6 +2410,9 @@ async def completion(
         data["metadata"] = {}
     data["metadata"]["user_api_key"] = user_api_key_dict.api_key
     data["metadata"]["user_api_key_metadata"] = user_api_key_dict.metadata
+    data["metadata"]["user_api_key_alias"] = getattr(
+        user_api_key_dict, "key_alias", None
+    )
     data["metadata"]["user_api_key_user_id"] = user_api_key_dict.user_id
     data["metadata"]["user_api_key_team_id"] = getattr(
         user_api_key_dict, "team_id", None
@@ -2578,6 +2584,9 @@ async def chat_completion(
     if "metadata" not in data:
         data["metadata"] = {}
     data["metadata"]["user_api_key"] = user_api_key_dict.api_key
+    data["metadata"]["user_api_key_alias"] = getattr(
+        user_api_key_dict, "key_alias", None
+    )
     data["metadata"]["user_api_key_user_id"] = user_api_key_dict.user_id
     data["metadata"]["user_api_key_team_id"] = getattr(
         user_api_key_dict, "team_id", None
@@ -2811,6 +2820,9 @@ async def embeddings(
         "authorization", None
     )  # do not store the original `sk-..` api key in the db
     data["metadata"]["headers"] = _headers
+    data["metadata"]["user_api_key_alias"] = getattr(
+        user_api_key_dict, "key_alias", None
+    )
     data["metadata"]["user_api_key_user_id"] = user_api_key_dict.user_id
     data["metadata"]["user_api_key_team_id"] = getattr(
         user_api_key_dict, "team_id", None
@@ -2985,6 +2997,9 @@ async def image_generation(
         "authorization", None
     )  # do not store the original `sk-..` api key in the db
     data["metadata"]["headers"] = _headers
+    data["metadata"]["user_api_key_alias"] = getattr(
+        user_api_key_dict, "key_alias", None
+    )
     data["metadata"]["user_api_key_user_id"] = user_api_key_dict.user_id
     data["metadata"]["user_api_key_team_id"] = getattr(
         user_api_key_dict, "team_id", None
@@ -3143,6 +3158,9 @@ async def moderations(
         "authorization", None
     )  # do not store the original `sk-..` api key in the db
     data["metadata"]["headers"] = _headers
+    data["metadata"]["user_api_key_alias"] = getattr(
+        user_api_key_dict, "key_alias", None
+    )
     data["metadata"]["user_api_key_user_id"] = user_api_key_dict.user_id
     data["metadata"]["user_api_key_team_id"] = getattr(
         user_api_key_dict, "team_id", None
@@ -4052,7 +4070,12 @@ async def view_spend_logs(
     tags=["Budget & Spend Tracking"],
     dependencies=[Depends(user_api_key_auth)],
 )
-async def global_spend_logs():
+async def global_spend_logs(
+    api_key: str = fastapi.Query(
+        default=None,
+        description="API Key to get global spend (spend per day for last 30d). Admin-only endpoint",
+    )
+):
     """
     [BETA] This is a beta endpoint. It will change.

@@ -4061,12 +4084,34 @@ async def global_spend_logs():
     More efficient implementation of /spend/logs, by creating a view over the spend logs table.
     """
     global prisma_client
+    if prisma_client is None:
+        raise ProxyException(
+            message="Prisma Client is not initialized",
+            type="internal_error",
+            param="None",
+            code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        )
+    if api_key is None:
         sql_query = """SELECT * FROM "MonthlyGlobalSpend";"""

         response = await prisma_client.db.query_raw(query=sql_query)

         return response
+    else:
+        sql_query = (
+            """
+            SELECT * FROM "MonthlyGlobalSpendPerKey"
+            WHERE "api_key" = '"""
+            + api_key
+            + """'
+            ORDER BY "date";
+            """
+        )
+
+        response = await prisma_client.db.query_raw(query=sql_query)
+
+        return response
+    return


 @router.get(
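A quick way to exercise the updated endpoint once the proxy is up — a sketch only: the address and `sk-1234` admin key mirror the placeholders used by the test files later in this commit, and the key passed as `api_key` is hypothetical.

```python
import requests

BASE_URL = "http://0.0.0.0:4000"                 # placeholder proxy address
HEADERS = {"Authorization": "Bearer sk-1234"}    # placeholder admin key

# No api_key -> daily spend for the last 30 days (backed by the MonthlyGlobalSpend view)
print(requests.get(f"{BASE_URL}/global/spend/logs", headers=HEADERS).json())

# With api_key -> per-day spend for that key only (backed by MonthlyGlobalSpendPerKey)
print(
    requests.get(
        f"{BASE_URL}/global/spend/logs",
        headers=HEADERS,
        params={"api_key": "sk-some-other-key"},  # hypothetical key to filter on
    ).json()
)
```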
@@ -4096,6 +4141,28 @@ async def global_spend_keys(
     return response


+@router.get(
+    "/global/spend/end_users",
+    tags=["Budget & Spend Tracking"],
+    dependencies=[Depends(user_api_key_auth)],
+)
+async def global_spend_end_users():
+    """
+    [BETA] This is a beta endpoint. It will change.
+
+    Use this to get the top 'n' keys with the highest spend, ordered by spend.
+    """
+    global prisma_client
+
+    if prisma_client is None:
+        raise HTTPException(status_code=500, detail={"error": "No db connected"})
+    sql_query = f"""SELECT * FROM "Last30dTopEndUsersSpend";"""
+
+    response = await prisma_client.db.query_raw(query=sql_query)
+
+    return response
+
+
 @router.get(
     "/global/spend/models",
     tags=["Budget & Spend Tracking"],
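The new end-users endpoint takes no parameters and simply reads the `Last30dTopEndUsersSpend` view created in `PrismaClient` further down; a minimal call, under the same placeholder host/key assumptions as above:

```python
import requests

resp = requests.get(
    "http://0.0.0.0:4000/global/spend/end_users",  # placeholder proxy address
    headers={"Authorization": "Bearer sk-1234"},   # placeholder admin key
)
# Rows of end_user / total_events / total_spend, per the view definition below
print(resp.json())
```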
@@ -4124,6 +4191,19 @@ async def global_spend_models(
     return response


+@router.post(
+    "/global/predict/spend/logs",
+    tags=["Budget & Spend Tracking"],
+    dependencies=[Depends(user_api_key_auth)],
+)
+async def global_predict_spend_logs(request: Request):
+    from litellm.proxy.enterprise.utils import _forecast_daily_cost
+
+    data = await request.json()
+    data = data.get("data")
+    return _forecast_daily_cost(data)
+
+
 @router.get(
     "/daily_metrics",
     summary="Get daily spend metrics",
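The forecast endpoint just unwraps the request body's `data` field and hands it to `_forecast_daily_cost`. The exact row shape isn't shown in this diff, so the payload below is an assumption — daily `date`/`spend` rows like the ones `/global/spend/logs` returns:

```python
import requests

payload = {
    "data": [  # assumed shape: daily spend rows, as returned by /global/spend/logs
        {"date": "2024-02-27", "spend": 1.10},
        {"date": "2024-02-28", "spend": 1.35},
    ]
}
resp = requests.post(
    "http://0.0.0.0:4000/global/predict/spend/logs",  # placeholder proxy address
    headers={"Authorization": "Bearer sk-1234"},      # placeholder admin key
    json=payload,
)
print(resp.json())
```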
@@ -4312,6 +4392,14 @@ async def user_info(
         default=False,
         description="set to true to View all users. When using view_all, don't pass user_id",
     ),
+    page: Optional[int] = fastapi.Query(
+        default=0,
+        description="Page number for pagination. Only use when view_all is true",
+    ),
+    page_size: Optional[int] = fastapi.Query(
+        default=25,
+        description="Number of items per page. Only use when view_all is true",
+    ),
     user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
 ):
     """
@@ -4333,8 +4421,14 @@ async def user_info(
         if user_id is not None:
             user_info = await prisma_client.get_data(user_id=user_id)
         elif view_all == True:
+            if page is None:
+                page = 0
+            if page_size is None:
+                page_size = 25
+            offset = (page) * page_size  # default is 0
+            limit = page_size  # default is 10
             user_info = await prisma_client.get_data(
-                table_name="user", query_type="find_all"
+                table_name="user", query_type="find_all", offset=offset, limit=limit
             )
             return user_info
         else:
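With the new `page`/`page_size` query params, listing all users is a matter of walking pages until an empty one comes back. A sketch (placeholder host/key again; it assumes the `view_all` response is the raw list returned by `prisma_client.get_data`):

```python
import requests

BASE_URL = "http://0.0.0.0:4000"               # placeholder proxy address
HEADERS = {"Authorization": "Bearer sk-1234"}  # placeholder admin key

page = 0
while True:
    users = requests.get(
        f"{BASE_URL}/user/info",
        headers=HEADERS,
        params={"view_all": "true", "page": page, "page_size": 25},
    ).json()
    if not users:  # empty page -> done
        break
    print(f"page {page}: {len(users)} users")
    page += 1
```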
@@ -4455,31 +4549,42 @@ async def user_update(data: UpdateUserRequest):
                 non_default_values[k] = v

         ## ADD USER, IF NEW ##
-        if data.user_id is not None and len(data.user_id) == 0:
+        verbose_proxy_logger.debug(f"/user/update: Received data = {data}")
+        if data.user_id is not None and len(data.user_id) > 0:
             non_default_values["user_id"] = data.user_id  # type: ignore
-            await prisma_client.update_data(
+            verbose_proxy_logger.debug(f"In update user, user_id condition block.")
+            response = await prisma_client.update_data(
                 user_id=data.user_id,
                 data=non_default_values,
                 table_name="user",
             )
+            verbose_proxy_logger.debug(
+                f"received response from updating prisma client. response={response}"
+            )
         elif data.user_email is not None:
             non_default_values["user_id"] = str(uuid.uuid4())
             non_default_values["user_email"] = data.user_email
             ## user email is not unique acc. to prisma schema -> future improvement
             ### for now: check if it exists in db, if not - insert it
-            existing_user_row = await prisma_client.get_data(
+            existing_user_rows = await prisma_client.get_data(
                 key_val={"user_email": data.user_email},
                 table_name="user",
                 query_type="find_all",
             )
-            if existing_user_row is None or (
-                isinstance(existing_user_row, list) and len(existing_user_row) == 0
+            if existing_user_rows is None or (
+                isinstance(existing_user_rows, list) and len(existing_user_rows) == 0
             ):
-                await prisma_client.insert_data(
+                response = await prisma_client.insert_data(
                     data=non_default_values, table_name="user"
                 )
-                return non_default_values
+            elif isinstance(existing_user_rows, list) and len(existing_user_rows) > 0:
+                for existing_user in existing_user_rows:
+                    response = await prisma_client.update_data(
+                        user_id=existing_user.user_id,
+                        data=non_default_values,
+                        table_name="user",
+                    )
+        return response
         # update based on remaining passed in values
     except Exception as e:
         traceback.print_exc()
@@ -5052,6 +5157,117 @@ async def team_member_add(
     return team_row


+@router.post(
+    "/team/member_delete",
+    tags=["team management"],
+    dependencies=[Depends(user_api_key_auth)],
+)
+async def team_member_delete(
+    data: TeamMemberDeleteRequest,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    """
+    [BETA]
+
+    delete members (either via user_email or user_id) from a team
+
+    If user doesn't exist, an exception will be raised
+    ```
+    curl -X POST 'http://0.0.0.0:8000/team/update' \
+
+    -H 'Authorization: Bearer sk-1234' \
+
+    -H 'Content-Type: application/json' \
+
+    -D '{
+        "team_id": "45e3e396-ee08-4a61-a88e-16b3ce7e0849",
+        "member": {"role": "user", "user_id": "krrish247652@berri.ai"}
+    }'
+    ```
+    """
+    if prisma_client is None:
+        raise HTTPException(status_code=500, detail={"error": "No db connected"})
+
+    if data.team_id is None:
+        raise HTTPException(status_code=400, detail={"error": "No team id passed in"})
+
+    if data.user_id is None and data.user_email is None:
+        raise HTTPException(
+            status_code=400,
+            detail={"error": "Either user_id or user_email needs to be passed in"},
+        )
+
+    existing_team_row = await prisma_client.get_data(  # type: ignore
+        team_id=data.team_id, table_name="team", query_type="find_unique"
+    )
+
+    ## DELETE MEMBER FROM TEAM
+    new_team_members = []
+    for m in existing_team_row.members_with_roles:
+        if (
+            data.user_id is not None
+            and m["user_id"] is not None
+            and data.user_id == m["user_id"]
+        ):
+            continue
+        elif (
+            data.user_email is not None
+            and m["user_email"] is not None
+            and data.user_email == m["user_email"]
+        ):
+            continue
+        new_team_members.append(m)
+    existing_team_row.members_with_roles = new_team_members
+    complete_team_data = LiteLLM_TeamTable(
+        **existing_team_row.model_dump(),
+    )
+
+    team_row = await prisma_client.update_data(
+        update_key_values=complete_team_data.json(exclude_none=True),
+        data=complete_team_data.json(exclude_none=True),
+        table_name="team",
+        team_id=data.team_id,
+    )
+
+    ## DELETE TEAM ID from USER ROW, IF EXISTS ##
+    # get user row
+    key_val = {}
+    if data.user_id is not None:
+        key_val["user_id"] = data.user_id
+    elif data.user_email is not None:
+        key_val["user_email"] = data.user_email
+    existing_user_rows = await prisma_client.get_data(
+        key_val=key_val,
+        table_name="user",
+        query_type="find_all",
+    )
+    user_data = {  # type: ignore
+        "teams": [],
+        "models": team_row["data"].models,
+    }
+    if existing_user_rows is not None and (
+        isinstance(existing_user_rows, list) and len(existing_user_rows) > 0
+    ):
+        for existing_user in existing_user_rows:
+            team_list = []
+            if hasattr(existing_user, "teams"):
+                team_list = existing_user.teams
+                team_list.remove(data.team_id)
+                user_data["user_id"] = existing_user.user_id
+                await prisma_client.update_data(
+                    user_id=existing_user.user_id,
+                    data=user_data,
+                    update_key_values_custom_query={
+                        "teams": {
+                            "set": [team_row["team_id"]],
+                        }
+                    },
+                    table_name="user",
+                )
+
+    return team_row["data"]
+
+
 @router.post(
     "/team/delete", tags=["team management"], dependencies=[Depends(user_api_key_auth)]
 )
@@ -5535,6 +5751,9 @@ async def async_queue_request(
             "authorization", None
         )  # do not store the original `sk-..` api key in the db
         data["metadata"]["headers"] = _headers
+        data["metadata"]["user_api_key_alias"] = getattr(
+            user_api_key_dict, "key_alias", None
+        )
         data["metadata"]["user_api_key_user_id"] = user_api_key_dict.user_id
         data["metadata"]["user_api_key_team_id"] = getattr(
             user_api_key_dict, "team_id", None
@@ -97,8 +97,9 @@ model LiteLLM_SpendLogs {
   cache_hit    String   @default("")
   cache_key    String   @default("")
   request_tags Json     @default("[]")
+  team_id      String?
+  end_user     String?
 }

 // Beta - allow team members to request access to a model
 model LiteLLM_UserNotifications {
   request_id String @unique
@@ -542,6 +542,100 @@ class PrismaClient:

             print("MonthlyGlobalSpend Created!")  # noqa

+        try:
+            await self.db.query_raw("""SELECT 1 FROM "Last30dKeysBySpend" LIMIT 1""")
+            print("Last30dKeysBySpend Exists!")  # noqa
+        except Exception as e:
+            sql_query = """
+            CREATE OR REPLACE VIEW "Last30dKeysBySpend" AS
+            SELECT
+                L."api_key",
+                V."key_alias",
+                V."key_name",
+                SUM(L."spend") AS total_spend
+            FROM
+                "LiteLLM_SpendLogs" L
+            LEFT JOIN
+                "LiteLLM_VerificationToken" V
+            ON
+                L."api_key" = V."token"
+            WHERE
+                L."startTime" >= (CURRENT_DATE - INTERVAL '30 days')
+            GROUP BY
+                L."api_key", V."key_alias", V."key_name"
+            ORDER BY
+                total_spend DESC;
+            """
+            await self.db.execute_raw(query=sql_query)
+
+            print("Last30dKeysBySpend Created!")  # noqa
+
+        try:
+            await self.db.query_raw("""SELECT 1 FROM "Last30dModelsBySpend" LIMIT 1""")
+            print("Last30dModelsBySpend Exists!")  # noqa
+        except Exception as e:
+            sql_query = """
+            CREATE OR REPLACE VIEW "Last30dModelsBySpend" AS
+            SELECT
+                "model",
+                SUM("spend") AS total_spend
+            FROM
+                "LiteLLM_SpendLogs"
+            WHERE
+                "startTime" >= (CURRENT_DATE - INTERVAL '30 days')
+                AND "model" != ''
+            GROUP BY
+                "model"
+            ORDER BY
+                total_spend DESC;
+            """
+            await self.db.execute_raw(query=sql_query)
+
+            print("Last30dModelsBySpend Created!")  # noqa
+        try:
+            await self.db.query_raw(
+                """SELECT 1 FROM "MonthlyGlobalSpendPerKey" LIMIT 1"""
+            )
+            print("MonthlyGlobalSpendPerKey Exists!")  # noqa
+        except Exception as e:
+            sql_query = """
+                CREATE OR REPLACE VIEW "MonthlyGlobalSpendPerKey" AS
+                SELECT
+                    DATE("startTime") AS date,
+                    SUM("spend") AS spend,
+                    api_key as api_key
+                FROM
+                    "LiteLLM_SpendLogs"
+                WHERE
+                    "startTime" >= (CURRENT_DATE - INTERVAL '30 days')
+                GROUP BY
+                    DATE("startTime"),
+                    api_key;
+            """
+            await self.db.execute_raw(query=sql_query)
+
+            print("MonthlyGlobalSpendPerKey Created!")  # noqa
+
+        try:
+            await self.db.query_raw(
+                """SELECT 1 FROM "Last30dTopEndUsersSpend" LIMIT 1"""
+            )
+            print("Last30dTopEndUsersSpend Exists!")  # noqa
+        except Exception as e:
+            sql_query = """
+            CREATE VIEW "Last30dTopEndUsersSpend" AS
+            SELECT end_user, COUNT(*) AS total_events, SUM(spend) AS total_spend
+            FROM "LiteLLM_SpendLogs"
+            WHERE end_user <> '' AND end_user <> user
+            AND "startTime" >= CURRENT_DATE - INTERVAL '30 days'
+            GROUP BY end_user
+            ORDER BY total_spend DESC
+            LIMIT 100;
+            """
+            await self.db.execute_raw(query=sql_query)
+
+            print("Last30dTopEndUsersSpend Created!")  # noqa
+
         return

     @backoff.on_exception(
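Once these views exist they can be poked at through the same raw-query helper the endpoints use; a small sanity-check sketch, assuming an already-connected `PrismaClient` instance (as at proxy startup):

```python
# Sketch: list a few rows from each of the new spend views.
# Assumes `prisma_client` is an initialized, connected PrismaClient.
async def inspect_spend_views(prisma_client):
    for view in (
        "Last30dKeysBySpend",
        "Last30dModelsBySpend",
        "MonthlyGlobalSpendPerKey",
        "Last30dTopEndUsersSpend",
    ):
        rows = await prisma_client.db.query_raw(f'SELECT * FROM "{view}" LIMIT 5;')
        print(view, rows)
```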
@@ -613,6 +707,10 @@ class PrismaClient:
         query_type: Literal["find_unique", "find_all"] = "find_unique",
         expires: Optional[datetime] = None,
         reset_at: Optional[datetime] = None,
+        offset: Optional[int] = None,  # pagination, what row number to start from
+        limit: Optional[
+            int
+        ] = None,  # pagination, number of rows to getch when find_all==True
     ):
         try:
             response: Any = None
@@ -748,7 +846,7 @@ class PrismaClient:
                     )
                 else:
                     response = await self.db.litellm_usertable.find_many(  # type: ignore
-                        order={"spend": "desc"},
+                        order={"spend": "desc"}, take=limit, skip=offset
                     )
                 return response
             elif table_name == "spend":
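These two arguments are what the `/user/info` change above relies on; calling the helper directly looks roughly like this (again assuming a connected `prisma_client`):

```python
# Sketch: page through the user table 25 rows at a time via the new offset/limit args.
async def first_two_pages(prisma_client, page_size=25):
    for page in range(2):
        users = await prisma_client.get_data(
            table_name="user",
            query_type="find_all",
            offset=page * page_size,
            limit=page_size,
        )
        print(f"page {page}: {len(users or [])} users")
```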
@@ -1034,7 +1132,7 @@ class PrismaClient:
                     + f"DB User Table - update succeeded {update_user_row}"
                     + "\033[0m"
                 )
-                return {"user_id": user_id, "data": db_data}
+                return {"user_id": user_id, "data": update_user_row}
             elif (
                 team_id is not None
                 or (table_name is not None and table_name == "team")
@@ -1473,7 +1571,12 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
         "startTime": start_time,
         "endTime": end_time,
         "model": kwargs.get("model", ""),
-        "user": kwargs.get("user", ""),
+        "user": kwargs.get("litellm_params", {})
+        .get("metadata", {})
+        .get("user_api_key_user_id", ""),
+        "team_id": kwargs.get("litellm_params", {})
+        .get("metadata", {})
+        .get("user_api_key_team_id", ""),
         "metadata": metadata,
         "cache_key": cache_key,
         "spend": kwargs.get("response_cost", 0),
@@ -1481,6 +1584,7 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
         "prompt_tokens": usage.get("prompt_tokens", 0),
         "completion_tokens": usage.get("completion_tokens", 0),
         "request_tags": metadata.get("tags", []),
+        "end_user": kwargs.get("user", ""),
     }

     verbose_proxy_logger.debug(f"SpendTable: created payload - payload: {payload}\n\n")
@@ -336,6 +336,52 @@ def test_gemini_pro_vision():
 # test_gemini_pro_vision()


+def encode_image(image_path):
+    import base64
+
+    with open(image_path, "rb") as image_file:
+        return base64.b64encode(image_file.read()).decode("utf-8")
+
+
+@pytest.mark.skip(
+    reason="we already test gemini-pro-vision, this is just another way to pass images"
+)
+def test_gemini_pro_vision_base64():
+    try:
+        load_vertex_ai_credentials()
+        litellm.set_verbose = True
+        litellm.num_retries = 3
+        image_path = "cached_logo.jpg"
+        # Getting the base64 string
+        base64_image = encode_image(image_path)
+        resp = litellm.completion(
+            model="vertex_ai/gemini-pro-vision",
+            messages=[
+                {
+                    "role": "user",
+                    "content": [
+                        {"type": "text", "text": "Whats in this image?"},
+                        {
+                            "type": "image_url",
+                            "image_url": {
+                                "url": "data:image/jpeg;base64," + base64_image
+                            },
+                        },
+                    ],
+                }
+            ],
+        )
+        print(resp)
+
+        prompt_tokens = resp.usage.prompt_tokens
+
+    except Exception as e:
+        if "500 Internal error encountered.'" in str(e):
+            pass
+        else:
+            pytest.fail(f"An exception occurred - {str(e)}")
+
+
 def test_gemini_pro_function_calling():
     load_vertex_ai_credentials()
     tools = [
@@ -107,6 +107,31 @@ def test_completion_mistral_api():
         pytest.fail(f"Error occurred: {e}")


+@pytest.mark.skip(
+    reason="Since we already test mistral/mistral-tiny in test_completion_mistral_api. This is only for locally verifying azure mistral works"
+)
+def test_completion_mistral_azure():
+    try:
+        litellm.set_verbose = True
+        response = completion(
+            model="mistral/Mistral-large-nmefg",
+            api_key=os.environ["MISTRAL_AZURE_API_KEY"],
+            api_base=os.environ["MISTRAL_AZURE_API_BASE"],
+            max_tokens=5,
+            messages=[
+                {
+                    "role": "user",
+                    "content": "Hi from litellm",
+                }
+            ],
+        )
+        # Add any assertions here to check the response
+        print(response)
+
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
 # test_completion_mistral_api()


@@ -267,7 +292,7 @@ def test_completion_gpt4_vision():


 def test_completion_azure_gpt4_vision():
-    # azure/gpt-4, vision takes 5seconds to respond
+    # azure/gpt-4, vision takes 5 seconds to respond
     try:
         litellm.set_verbose = True
         response = completion(
@@ -4893,7 +4893,7 @@ def get_optional_params(
             extra_body  # openai client supports `extra_body` param
         )
     else:  # assume passing in params for openai/azure openai
-        print_verbose(f"UNMAPPED PROVIDER, ASSUMING IT'S OPENAI/AZUREs")
+        print_verbose(f"UNMAPPED PROVIDER, ASSUMING IT'S OPENAI/AZURE")
         supported_params = [
             "functions",
             "function_call",
@@ -5015,8 +5015,21 @@ def get_llm_provider(
             dynamic_api_key = get_secret("GROQ_API_KEY")
         elif custom_llm_provider == "mistral":
             # mistral is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.mistral.ai
-            api_base = api_base or "https://api.mistral.ai/v1"
-            dynamic_api_key = get_secret("MISTRAL_API_KEY")
+            api_base = (
+                api_base
+                or get_secret("MISTRAL_AZURE_API_BASE")  # for Azure AI Mistral
+                or "https://api.mistral.ai/v1"
+            )
+            # if api_base does not end with /v1 we add it
+            if api_base is not None and not api_base.endswith(
+                "/v1"
+            ):  # Mistral always needs a /v1 at the end
+                api_base = api_base + "/v1"
+            dynamic_api_key = (
+                api_key
+                or get_secret("MISTRAL_AZURE_API_KEY")  # for Azure AI Mistral
+                or get_secret("MISTRAL_API_KEY")
+            )
         elif custom_llm_provider == "voyage":
             # voyage is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.voyageai.com/v1
             api_base = "https://api.voyageai.com/v1"
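For reference, `get_llm_provider` returns a `(model, custom_llm_provider, dynamic_api_key, api_base)` tuple — the `validate_environment` fix below unpacks it that way — so the Azure AI fallback can be checked directly. The deployment name below is the placeholder used in `test_completion_mistral_azure` above; the endpoint URL is illustrative only:

```python
import os
from litellm.utils import get_llm_provider

# Placeholder Azure AI Mistral endpoint + key; /v1 is appended automatically if missing.
os.environ["MISTRAL_AZURE_API_BASE"] = "https://my-deployment.westus.inference.ai.azure.com"
os.environ["MISTRAL_AZURE_API_KEY"] = "my-azure-key"

model, provider, api_key, api_base = get_llm_provider(model="mistral/Mistral-large-nmefg")
print(provider)   # mistral
print(api_base)   # the Azure base, with /v1 appended
```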
@ -5511,7 +5524,7 @@ def validate_environment(model: Optional[str] = None) -> dict:
|
||||||
}
|
}
|
||||||
## EXTRACT LLM PROVIDER - if model name provided
|
## EXTRACT LLM PROVIDER - if model name provided
|
||||||
try:
|
try:
|
||||||
custom_llm_provider = get_llm_provider(model=model)
|
_, custom_llm_provider, _, _ = get_llm_provider(model=model)
|
||||||
except:
|
except:
|
||||||
custom_llm_provider = None
|
custom_llm_provider = None
|
||||||
# # check if llm provider part of model name
|
# # check if llm provider part of model name
|
||||||
|
@ -5605,7 +5618,7 @@ def validate_environment(model: Optional[str] = None) -> dict:
|
||||||
## openai - chatcompletion + text completion
|
## openai - chatcompletion + text completion
|
||||||
if (
|
if (
|
||||||
model in litellm.open_ai_chat_completion_models
|
model in litellm.open_ai_chat_completion_models
|
||||||
or litellm.open_ai_text_completion_models
|
or model in litellm.open_ai_text_completion_models
|
||||||
):
|
):
|
||||||
if "OPENAI_API_KEY" in os.environ:
|
if "OPENAI_API_KEY" in os.environ:
|
||||||
keys_in_environment = True
|
keys_in_environment = True
|
||||||
|
|
|
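A one-liner to see the corrected behaviour of the second hunk — previously `or litellm.open_ai_text_completion_models` was always truthy (it is a non-empty list), so non-OpenAI models could fall into the OpenAI key check. Output shape is the usual `keys_in_environment` / `missing_keys` dict:

```python
from litellm.utils import validate_environment

# Only genuine OpenAI chat/text-completion models should hit the OPENAI_API_KEY check now.
print(validate_environment(model="gpt-3.5-turbo"))
# e.g. {'keys_in_environment': False, 'missing_keys': ['OPENAI_API_KEY']} if the key isn't set
```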
@@ -424,6 +424,22 @@
         "mode": "chat",
         "supports_function_calling": true
     },
+    "azure/mistral-large-latest": {
+        "max_tokens": 32000,
+        "input_cost_per_token": 0.000008,
+        "output_cost_per_token": 0.000024,
+        "litellm_provider": "azure",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
+    "azure/mistral-large-2402": {
+        "max_tokens": 32000,
+        "input_cost_per_token": 0.000008,
+        "output_cost_per_token": 0.000024,
+        "litellm_provider": "azure",
+        "mode": "chat",
+        "supports_function_calling": true
+    },
     "azure/ada": {
         "max_tokens": 8191,
         "input_cost_per_token": 0.0000001,
@@ -564,6 +580,20 @@
         "litellm_provider": "mistral",
         "mode": "embedding"
     },
+    "groq/llama2-70b-4096": {
+        "max_tokens": 4096,
+        "input_cost_per_token": 0.00000070,
+        "output_cost_per_token": 0.00000080,
+        "litellm_provider": "groq",
+        "mode": "chat"
+    },
+    "groq/mixtral-8x7b-32768": {
+        "max_tokens": 32768,
+        "input_cost_per_token": 0.00000027,
+        "output_cost_per_token": 0.00000027,
+        "litellm_provider": "groq",
+        "mode": "chat"
+    },
     "claude-instant-1.2": {
         "max_tokens": 100000,
         "max_output_tokens": 8191,
@@ -2040,6 +2070,36 @@
         "output_cost_per_token": 0.00000028,
         "litellm_provider": "perplexity",
         "mode": "chat"
+    },
+    "perplexity/sonar-small-chat": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.00000007,
+        "output_cost_per_token": 0.00000028,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-small-online": {
+        "max_tokens": 12000,
+        "input_cost_per_token": 0,
+        "output_cost_per_token": 0.00000028,
+        "input_cost_per_request": 0.005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-medium-chat": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.0000006,
+        "output_cost_per_token": 0.0000018,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-medium-online": {
+        "max_tokens": 12000,
+        "input_cost_per_token": 0,
+        "output_cost_per_token": 0.0000018,
+        "input_cost_per_request": 0.005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
     },
     "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
         "max_tokens": 16384,
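These entries land in the cost map that ships with the package (`litellm.model_cost`); a quick local check of the new prices, assuming a build that includes this JSON:

```python
import litellm

for name in (
    "azure/mistral-large-latest",
    "groq/mixtral-8x7b-32768",
    "perplexity/sonar-medium-online",
):
    entry = litellm.model_cost.get(name, {})
    print(name, entry.get("input_cost_per_token"), entry.get("output_cost_per_token"))
```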
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "1.28.0"
+version = "1.28.7"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT"
@@ -74,7 +74,7 @@ requires = ["poetry-core", "wheel"]
 build-backend = "poetry.core.masonry.api"

 [tool.commitizen]
-version = "1.28.0"
+version = "1.28.7"
 version_files = [
     "pyproject.toml:^version"
 ]
@@ -97,6 +97,8 @@ model LiteLLM_SpendLogs {
   cache_hit    String   @default("")
   cache_key    String   @default("")
   request_tags Json     @default("[]")
+  team_id      String?
+  end_user     String?
 }

 // Beta - allow team members to request access to a model
@@ -35,6 +35,25 @@ async def new_user(session, i, user_id=None, budget=None, budget_duration=None):
     return await response.json()


+async def delete_member(session, i, team_id, user_id):
+    url = "http://0.0.0.0:4000/team/member_delete"
+    headers = {"Authorization": "Bearer sk-1234", "Content-Type": "application/json"}
+    data = {"team_id": team_id, "user_id": user_id}
+
+    async with session.post(url, headers=headers, json=data) as response:
+        status = response.status
+        response_text = await response.text()
+
+        print(f"Response {i} (Status code: {status}):")
+        print(response_text)
+        print()
+
+        if status != 200:
+            raise Exception(f"Request {i} did not return a 200 status code: {status}")
+
+        return await response.json()
+
+
 async def generate_key(
     session,
     i,
@@ -290,3 +309,45 @@ async def test_team_delete():
         response = await chat_completion(session=session, key=key)
         ## Delete team
         await delete_team(session=session, i=0, team_id=team_data["team_id"])
+
+
+@pytest.mark.asyncio
+async def test_member_delete():
+    """
+    - Create team
+    - Add member
+    - Get team info (check if member in team)
+    - Delete member
+    - Get team info (check if member in team)
+    """
+    async with aiohttp.ClientSession() as session:
+        # Create Team
+        ## Create admin
+        admin_user = f"{uuid.uuid4()}"
+        await new_user(session=session, i=0, user_id=admin_user)
+        ## Create normal user
+        normal_user = f"{uuid.uuid4()}"
+        print(f"normal_user: {normal_user}")
+        await new_user(session=session, i=0, user_id=normal_user)
+        ## Create team with 1 admin and 1 user
+        member_list = [
+            {"role": "admin", "user_id": admin_user},
+            {"role": "user", "user_id": normal_user},
+        ]
+        team_data = await new_team(session=session, i=0, member_list=member_list)
+        print(f"team_data: {team_data}")
+        member_id_list = []
+        for member in team_data["members_with_roles"]:
+            member_id_list.append(member["user_id"])
+
+        assert normal_user in member_id_list
+        # Delete member
+        updated_team_data = await delete_member(
+            session=session, i=0, team_id=team_data["team_id"], user_id=normal_user
+        )
+        print(f"updated_team_data: {updated_team_data}")
+        member_id_list = []
+        for member in updated_team_data["members_with_roles"]:
+            member_id_list.append(member["user_id"])
+
+        assert normal_user not in member_id_list
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
||||||
|
self.__BUILD_MANIFEST={__rewrites:{afterFiles:[],beforeFiles:[],fallback:[]},"/_error":["static/chunks/pages/_error-d6107f1aac0c574c.js"],sortedPages:["/_app","/_error"]},self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
|
|
@ -0,0 +1 @@
|
||||||
|
self.__SSG_MANIFEST=new Set([]);self.__SSG_MANIFEST_CB&&self.__SSG_MANIFEST_CB()
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
||||||
|
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[185],{87421:function(n,e,t){Promise.resolve().then(t.t.bind(t,99646,23)),Promise.resolve().then(t.t.bind(t,63385,23))},63385:function(){},99646:function(n){n.exports={style:{fontFamily:"'__Inter_c23dc8', '__Inter_Fallback_c23dc8'",fontStyle:"normal"},className:"__className_c23dc8"}}},function(n){n.O(0,[971,69,744],function(){return n(n.s=87421)}),_N_E=n.O()}]);
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
||||||
|
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[744],{32028:function(e,n,t){Promise.resolve().then(t.t.bind(t,47690,23)),Promise.resolve().then(t.t.bind(t,48955,23)),Promise.resolve().then(t.t.bind(t,5613,23)),Promise.resolve().then(t.t.bind(t,11902,23)),Promise.resolve().then(t.t.bind(t,31778,23)),Promise.resolve().then(t.t.bind(t,77831,23))}},function(e){var n=function(n){return e(e.s=n)};e.O(0,[971,69],function(){return n(35317),n(32028)}),_N_E=e.O()}]);
|
|
@ -0,0 +1 @@
|
||||||
|
!function(){"use strict";var e,t,n,r,o,u,i,c,f,a={},l={};function d(e){var t=l[e];if(void 0!==t)return t.exports;var n=l[e]={id:e,loaded:!1,exports:{}},r=!0;try{a[e](n,n.exports,d),r=!1}finally{r&&delete l[e]}return n.loaded=!0,n.exports}d.m=a,e=[],d.O=function(t,n,r,o){if(n){o=o||0;for(var u=e.length;u>0&&e[u-1][2]>o;u--)e[u]=e[u-1];e[u]=[n,r,o];return}for(var i=1/0,u=0;u<e.length;u++){for(var n=e[u][0],r=e[u][1],o=e[u][2],c=!0,f=0;f<n.length;f++)i>=o&&Object.keys(d.O).every(function(e){return d.O[e](n[f])})?n.splice(f--,1):(c=!1,o<i&&(i=o));if(c){e.splice(u--,1);var a=r();void 0!==a&&(t=a)}}return t},d.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return d.d(t,{a:t}),t},n=Object.getPrototypeOf?function(e){return Object.getPrototypeOf(e)}:function(e){return e.__proto__},d.t=function(e,r){if(1&r&&(e=this(e)),8&r||"object"==typeof e&&e&&(4&r&&e.__esModule||16&r&&"function"==typeof e.then))return e;var o=Object.create(null);d.r(o);var u={};t=t||[null,n({}),n([]),n(n)];for(var i=2&r&&e;"object"==typeof i&&!~t.indexOf(i);i=n(i))Object.getOwnPropertyNames(i).forEach(function(t){u[t]=function(){return e[t]}});return u.default=function(){return e},d.d(o,u),o},d.d=function(e,t){for(var n in t)d.o(t,n)&&!d.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},d.f={},d.e=function(e){return Promise.all(Object.keys(d.f).reduce(function(t,n){return d.f[n](e,t),t},[]))},d.u=function(e){},d.miniCssF=function(e){return"static/css/16eb955147cb6b2f.css"},d.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||Function("return this")()}catch(e){if("object"==typeof window)return window}}(),d.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r={},o="_N_E:",d.l=function(e,t,n,u){if(r[e]){r[e].push(t);return}if(void 0!==n)for(var i,c,f=document.getElementsByTagName("script"),a=0;a<f.length;a++){var l=f[a];if(l.getAttribute("src")==e||l.getAttribute("data-webpack")==o+n){i=l;break}}i||(c=!0,(i=document.createElement("script")).charset="utf-8",i.timeout=120,d.nc&&i.setAttribute("nonce",d.nc),i.setAttribute("data-webpack",o+n),i.src=d.tu(e)),r[e]=[t];var s=function(t,n){i.onerror=i.onload=null,clearTimeout(p);var o=r[e];if(delete r[e],i.parentNode&&i.parentNode.removeChild(i),o&&o.forEach(function(e){return e(n)}),t)return t(n)},p=setTimeout(s.bind(null,void 0,{type:"timeout",target:i}),12e4);i.onerror=s.bind(null,i.onerror),i.onload=s.bind(null,i.onload),c&&document.head.appendChild(i)},d.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},d.nmd=function(e){return e.paths=[],e.children||(e.children=[]),e},d.tt=function(){return void 0===u&&(u={createScriptURL:function(e){return e}},"undefined"!=typeof trustedTypes&&trustedTypes.createPolicy&&(u=trustedTypes.createPolicy("nextjs#bundler",u))),u},d.tu=function(e){return d.tt().createScriptURL(e)},d.p="/ui/_next/",i={272:0},d.f.j=function(e,t){var n=d.o(i,e)?i[e]:void 0;if(0!==n){if(n)t.push(n[2]);else if(272!=e){var r=new Promise(function(t,r){n=i[e]=[t,r]});t.push(n[2]=r);var o=d.p+d.u(e),u=Error();d.l(o,function(t){if(d.o(i,e)&&(0!==(n=i[e])&&(i[e]=void 0),n)){var r=t&&("load"===t.type?"missing":t.type),o=t&&t.target&&t.target.src;u.message="Loading chunk "+e+" failed.\n("+r+": "+o+")",u.name="ChunkLoadError",u.type=r,u.request=o,n[1](u)}},"chunk-"+e,e)}else i[e]=0}},d.O.j=function(e){return 0===i[e]},c=function(e,t){var 
n,r,o=t[0],u=t[1],c=t[2],f=0;if(o.some(function(e){return 0!==i[e]})){for(n in u)d.o(u,n)&&(d.m[n]=u[n]);if(c)var a=c(d)}for(e&&e(t);f<o.length;f++)r=o[f],d.o(i,r)&&i[r]&&i[r][0](),i[r]=0;return d.O(a)},(f=self.webpackChunk_N_E=self.webpackChunk_N_E||[]).forEach(c.bind(null,0)),f.push=c.bind(null,f.push.bind(f))}();
|
File diff suppressed because one or more lines are too long
|
@ -1 +1 @@
|
||||||
<!DOCTYPE html><html id="__next_error__"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" as="script" fetchPriority="low" href="/ui/_next/static/chunks/webpack-12184ee6a95c1363.js" crossorigin=""/><script src="/ui/_next/static/chunks/fd9d1056-a85b2c176012d8e5.js" async="" crossorigin=""></script><script src="/ui/_next/static/chunks/69-e1b183dda365ec86.js" async="" crossorigin=""></script><script src="/ui/_next/static/chunks/main-app-096338c8e1915716.js" async="" crossorigin=""></script><title>🚅 LiteLLM</title><meta name="description" content="LiteLLM Proxy Admin UI"/><link rel="icon" href="/ui/favicon.ico" type="image/x-icon" sizes="16x16"/><meta name="next-size-adjust"/><script src="/ui/_next/static/chunks/polyfills-c67a75d1b6f99dc8.js" crossorigin="" noModule=""></script></head><body><script src="/ui/_next/static/chunks/webpack-12184ee6a95c1363.js" crossorigin="" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0]);self.__next_f.push([2,null])</script><script>self.__next_f.push([1,"1:HL[\"/ui/_next/static/media/c9a5bc6a7c948fb0-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n2:HL[\"/ui/_next/static/css/a40ad0909dd7838e.css\",\"style\",{\"crossOrigin\":\"\"}]\n0:\"$L3\"\n"])</script><script>self.__next_f.push([1,"4:I[47690,[],\"\"]\n6:I[77831,[],\"\"]\n7:I[30280,[\"303\",\"static/chunks/303-d80f23087a9e6aec.js\",\"931\",\"static/chunks/app/page-8f65fc157f538dff.js\"],\"\"]\n8:I[5613,[],\"\"]\n9:I[31778,[],\"\"]\nb:I[48955,[],\"\"]\nc:[]\n"])</script><script>self.__next_f.push([1,"3:[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/ui/_next/static/css/a40ad0909dd7838e.css\",\"precedence\":\"next\",\"crossOrigin\":\"\"}]],[\"$\",\"$L4\",null,{\"buildId\":\"kyOCJPBB9pyUfbMKCAXr-\",\"assetPrefix\":\"/ui\",\"initialCanonicalUrl\":\"/\",\"initialTree\":[\"\",{\"children\":[\"__PAGE__\",{}]},\"$undefined\",\"$undefined\",true],\"initialSeedData\":[\"\",{\"children\":[\"__PAGE__\",{},[\"$L5\",[\"$\",\"$L6\",null,{\"propsForComponent\":{\"params\":{}},\"Component\":\"$7\",\"isStaticGeneration\":true}],null]]},[null,[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__className_c23dc8\",\"children\":[\"$\",\"$L8\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\"],\"loading\":\"$undefined\",\"loadingStyles\":\"$undefined\",\"loadingScripts\":\"$undefined\",\"hasLoading\":false,\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L9\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 
0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],\"notFoundStyles\":[],\"styles\":null}]}]}],null]],\"initialHead\":[false,\"$La\"],\"globalErrorComponent\":\"$b\",\"missingSlots\":\"$Wc\"}]]\n"])</script><script>self.__next_f.push([1,"a:[[\"$\",\"meta\",\"0\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}],[\"$\",\"meta\",\"1\",{\"charSet\":\"utf-8\"}],[\"$\",\"title\",\"2\",{\"children\":\"🚅 LiteLLM\"}],[\"$\",\"meta\",\"3\",{\"name\":\"description\",\"content\":\"LiteLLM Proxy Admin UI\"}],[\"$\",\"link\",\"4\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"meta\",\"5\",{\"name\":\"next-size-adjust\"}]]\n5:null\n"])</script><script>self.__next_f.push([1,""])</script></body></html>
|
<!DOCTYPE html><html id="__next_error__"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" as="script" fetchPriority="low" href="/ui/_next/static/chunks/webpack-6b93c4e1d000ff14.js" crossorigin=""/><script src="/ui/_next/static/chunks/fd9d1056-a85b2c176012d8e5.js" async="" crossorigin=""></script><script src="/ui/_next/static/chunks/69-e1b183dda365ec86.js" async="" crossorigin=""></script><script src="/ui/_next/static/chunks/main-app-9b4fb13a7db53edf.js" async="" crossorigin=""></script><title>🚅 LiteLLM</title><meta name="description" content="LiteLLM Proxy Admin UI"/><link rel="icon" href="/ui/favicon.ico" type="image/x-icon" sizes="16x16"/><meta name="next-size-adjust"/><script src="/ui/_next/static/chunks/polyfills-c67a75d1b6f99dc8.js" crossorigin="" noModule=""></script></head><body><script src="/ui/_next/static/chunks/webpack-6b93c4e1d000ff14.js" crossorigin="" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0]);self.__next_f.push([2,null])</script><script>self.__next_f.push([1,"1:HL[\"/ui/_next/static/media/c9a5bc6a7c948fb0-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n2:HL[\"/ui/_next/static/css/16eb955147cb6b2f.css\",\"style\",{\"crossOrigin\":\"\"}]\n0:\"$L3\"\n"])</script><script>self.__next_f.push([1,"4:I[47690,[],\"\"]\n6:I[77831,[],\"\"]\n7:I[9125,[\"730\",\"static/chunks/730-1411b729a1c79695.js\",\"931\",\"static/chunks/app/page-ad3e13d2fec661b5.js\"],\"\"]\n8:I[5613,[],\"\"]\n9:I[31778,[],\"\"]\nb:I[48955,[],\"\"]\nc:[]\n"])</script><script>self.__next_f.push([1,"3:[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/ui/_next/static/css/16eb955147cb6b2f.css\",\"precedence\":\"next\",\"crossOrigin\":\"\"}]],[\"$\",\"$L4\",null,{\"buildId\":\"SP1Cm97dc_3zo4HlsJJjg\",\"assetPrefix\":\"/ui\",\"initialCanonicalUrl\":\"/\",\"initialTree\":[\"\",{\"children\":[\"__PAGE__\",{}]},\"$undefined\",\"$undefined\",true],\"initialSeedData\":[\"\",{\"children\":[\"__PAGE__\",{},[\"$L5\",[\"$\",\"$L6\",null,{\"propsForComponent\":{\"params\":{}},\"Component\":\"$7\",\"isStaticGeneration\":true}],null]]},[null,[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__className_c23dc8\",\"children\":[\"$\",\"$L8\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\"],\"loading\":\"$undefined\",\"loadingStyles\":\"$undefined\",\"loadingScripts\":\"$undefined\",\"hasLoading\":false,\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L9\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 
0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],\"notFoundStyles\":[],\"styles\":null}]}]}],null]],\"initialHead\":[false,\"$La\"],\"globalErrorComponent\":\"$b\",\"missingSlots\":\"$Wc\"}]]\n"])</script><script>self.__next_f.push([1,"a:[[\"$\",\"meta\",\"0\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}],[\"$\",\"meta\",\"1\",{\"charSet\":\"utf-8\"}],[\"$\",\"title\",\"2\",{\"children\":\"🚅 LiteLLM\"}],[\"$\",\"meta\",\"3\",{\"name\":\"description\",\"content\":\"LiteLLM Proxy Admin UI\"}],[\"$\",\"link\",\"4\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"meta\",\"5\",{\"name\":\"next-size-adjust\"}]]\n5:null\n"])</script><script>self.__next_f.push([1,""])</script></body></html>
|
|
@ -1,7 +1,7 @@
|
||||||
2:I[77831,[],""]
|
2:I[77831,[],""]
|
||||||
3:I[30280,["303","static/chunks/303-d80f23087a9e6aec.js","931","static/chunks/app/page-8f65fc157f538dff.js"],""]
|
3:I[9125,["730","static/chunks/730-1411b729a1c79695.js","931","static/chunks/app/page-ad3e13d2fec661b5.js"],""]
|
||||||
4:I[5613,[],""]
|
4:I[5613,[],""]
|
||||||
5:I[31778,[],""]
|
5:I[31778,[],""]
|
||||||
Generated Next.js dashboard payload (pre-rendered RSC output); only the build ID and the hashed CSS asset change:
- 0:["kyOCJPBB9pyUfbMKCAXr-", …, {"rel":"stylesheet","href":"/ui/_next/static/css/a40ad0909dd7838e.css","precedence":"next","crossOrigin":""}, …]
+ 0:["SP1Cm97dc_3zo4HlsJJjg", …, {"rel":"stylesheet","href":"/ui/_next/static/css/16eb955147cb6b2f.css","precedence":"next","crossOrigin":""}, …]
  6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"🚅 LiteLLM"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]   (unchanged)
  1:null   (unchanged)
@@ -11,8 +11,10 @@ import ChatUI from "@/components/chat_ui";
 import Sidebar from "../components/leftnav";
 import Usage from "../components/usage";
 import { jwtDecode } from "jwt-decode";
+import { Typography } from "antd";
 
 const CreateKeyPage = () => {
+  const { Title, Paragraph } = Typography;
   const [userRole, setUserRole] = useState("");
   const [userEmail, setUserEmail] = useState<null | string>(null);
   const [teams, setTeams] = useState<null | any[]>(null);
@@ -41,6 +43,9 @@ const CreateKeyPage = () => {
       const formattedUserRole = formatUserRole(decoded.user_role);
       console.log("Decoded user_role:", formattedUserRole);
       setUserRole(formattedUserRole);
+      if (formattedUserRole == "Admin Viewer") {
+        setPage("usage");
+      }
     } else {
       console.log("User role not defined");
     }
@@ -66,7 +71,8 @@ const CreateKeyPage = () => {
     if (!userRole) {
      return "Undefined Role";
     }
-    console.log(`Received user role: ${userRole}`);
+    console.log(`Received user role: ${userRole.toLowerCase()}`);
+    console.log(`Received user role length: ${userRole.toLowerCase().length}`);
     switch (userRole.toLowerCase()) {
       case "app_owner":
         return "App Owner";
@@ -138,10 +138,13 @@ const AdminPanel: React.FC<AdminPanelProps> = ({
   console.log(`admins: ${admins?.length}`);
   return (
     <div className="w-full m-2">
-      <Title level={4}>Proxy Admins</Title>
+      <Title level={4}>Restricted Access</Title>
       <Paragraph>
-        Add other people to just view global spend. They cannot create teams or
-        grant users access to new models.
+        Add other people to just view spend. They cannot create keys, teams or
+        grant users access to new models.{" "}
+        <a href="https://docs.litellm.ai/docs/proxy/ui#restrict-ui-access">
+          Requires SSO Setup
+        </a>
       </Paragraph>
       <Grid numItems={1} className="gap-2 p-0 w-full">
         <Col numColSpan={1}>
@@ -21,6 +21,7 @@ import {
 import { modelAvailableCall } from "./networking";
 import openai from "openai";
 import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
+import { Typography } from "antd";
 
 interface ChatUIProps {
   accessToken: string | null;
@@ -145,6 +146,16 @@ const ChatUI: React.FC<ChatUIProps> = ({
     setInputMessage("");
   };
 
+  if (userRole && userRole == "Admin Viewer") {
+    const { Title, Paragraph } = Typography;
+    return (
+      <div>
+        <Title level={1}>Access Denied</Title>
+        <Paragraph>Ask your proxy admin for access to test models</Paragraph>
+      </div>
+    );
+  }
+
   return (
     <div style={{ width: "100%", position: "relative" }}>
       <Grid className="gap-2 p-10 h-[75vh] w-full">
@@ -16,6 +16,34 @@ const Sidebar: React.FC<SidebarProps> = ({
   userRole,
   defaultSelectedKey,
 }) => {
+  if (userRole == "Admin Viewer") {
+    return (
+      <Layout style={{ minHeight: "100vh", maxWidth: "120px" }}>
+        <Sider width={120}>
+          <Menu
+            mode="inline"
+            defaultSelectedKeys={
+              defaultSelectedKey ? defaultSelectedKey : ["4"]
+            }
+            style={{ height: "100%", borderRight: 0 }}
+          >
+            <Menu.Item key="4" onClick={() => setPage("api-keys")}>
+              API Keys
+            </Menu.Item>
+            <Menu.Item key="2" onClick={() => setPage("models")}>
+              Models
+            </Menu.Item>
+            <Menu.Item key="3" onClick={() => setPage("llm-playground")}>
+              Chat UI
+            </Menu.Item>
+            <Menu.Item key="1" onClick={() => setPage("usage")}>
+              Usage
+            </Menu.Item>
+          </Menu>
+        </Sider>
+      </Layout>
+    );
+  }
   return (
     <Layout style={{ minHeight: "100vh", maxWidth: "120px" }}>
       <Sider width={120}>
@@ -1,8 +1,20 @@
 import React, { useState, useEffect } from "react";
-import { Card, Title, Subtitle, Table, TableHead, TableRow, TableCell, TableBody, Metric, Grid } from "@tremor/react";
+import {
+  Card,
+  Title,
+  Subtitle,
+  Table,
+  TableHead,
+  TableRow,
+  TableCell,
+  TableBody,
+  Metric,
+  Grid,
+} from "@tremor/react";
 import { modelInfoCall, userGetRequesedtModelsCall } from "./networking";
-import { Badge, BadgeDelta, Button } from '@tremor/react';
+import { Badge, BadgeDelta, Button } from "@tremor/react";
 import RequestAccess from "./request_model_access";
+import { Typography } from "antd";
 
 interface ModelDashboardProps {
   accessToken: string | null;
@@ -20,7 +32,6 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
   const [modelData, setModelData] = useState<any>({ data: [] });
   const [pendingRequests, setPendingRequests] = useState<any[]>([]);
-
 
   useEffect(() => {
     if (!accessToken || !token || !userRole || !userID) {
       return;
@@ -28,7 +39,11 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
     const fetchData = async () => {
       try {
         // Replace with your actual API call for model data
-        const modelDataResponse = await modelInfoCall(accessToken, userID, userRole);
+        const modelDataResponse = await modelInfoCall(
+          accessToken,
+          userID,
+          userRole
+        );
         console.log("Model data response:", modelDataResponse.data);
         setModelData(modelDataResponse);
 
@@ -38,7 +53,6 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
         console.log("Pending Requests:", pendingRequests);
         setPendingRequests(user_requests.requests || []);
       }
-
     } catch (error) {
       console.error("There was an error fetching the model data", error);
     }
@@ -67,9 +81,9 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
 
     let defaultProvider = "openai";
     let provider = "";
-    let input_cost = "Undefined"
-    let output_cost = "Undefined"
-    let max_tokens = "Undefined"
+    let input_cost = "Undefined";
+    let output_cost = "Undefined";
+    let max_tokens = "Undefined";
 
     // Check if litellm_model_name is null or undefined
     if (litellm_model_name) {
@@ -81,7 +95,6 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
 
       // If there is only one element, default provider to openai
       provider = splitModel.length === 1 ? defaultProvider : firstElement;
-
     } else {
       // litellm_model_name is null or undefined, default provider to openai
       provider = defaultProvider;
@@ -91,20 +104,29 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
       input_cost = model_info?.input_cost_per_token;
       output_cost = model_info?.output_cost_per_token;
       max_tokens = model_info?.max_tokens;
 
     }
-    modelData.data[i].provider = provider
-    modelData.data[i].input_cost = input_cost
-    modelData.data[i].output_cost = output_cost
-    modelData.data[i].max_tokens = max_tokens
+    modelData.data[i].provider = provider;
+    modelData.data[i].input_cost = input_cost;
+    modelData.data[i].output_cost = output_cost;
+    modelData.data[i].max_tokens = max_tokens;
 
     all_models_on_proxy.push(curr_model.model_name);
 
     console.log(modelData.data[i]);
 
   }
   // when users click request access show pop up to allow them to request access
 
+  if (userRole && userRole == "Admin Viewer") {
+    const { Title, Paragraph } = Typography;
+    return (
+      <div>
+        <Title level={1}>Access Denied</Title>
+        <Paragraph>
+          Ask your proxy admin for access to view all models
+        </Paragraph>
+      </div>
+    );
+  }
+
   return (
     <div style={{ width: "100%" }}>
@@ -113,64 +135,101 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
         <Table className="mt-5">
           <TableHead>
             <TableRow>
-              <TableCell><Title>Model Name </Title></TableCell>
-              <TableCell><Title>Provider</Title></TableCell>
-              <TableCell><Title>Access</Title></TableCell>
-              <TableCell><Title>Input Price per token ($)</Title></TableCell>
-              <TableCell><Title>Output Price per token ($)</Title></TableCell>
-              <TableCell><Title>Max Tokens</Title></TableCell>
+              <TableCell>
+                <Title>Model Name </Title>
+              </TableCell>
+              <TableCell>
+                <Title>Provider</Title>
+              </TableCell>
+              <TableCell>
+                <Title>Access</Title>
+              </TableCell>
+              <TableCell>
+                <Title>Input Price per token ($)</Title>
+              </TableCell>
+              <TableCell>
+                <Title>Output Price per token ($)</Title>
+              </TableCell>
+              <TableCell>
+                <Title>Max Tokens</Title>
+              </TableCell>
             </TableRow>
           </TableHead>
           <TableBody>
             {modelData.data.map((model: any) => (
               <TableRow key={model.model_name}>
-                <TableCell><Title>{model.model_name}</Title></TableCell>
+                <TableCell>
+                  <Title>{model.model_name}</Title>
+                </TableCell>
                 <TableCell>{model.provider}</TableCell>
 
                 <TableCell>
-                  {model.user_access ? <Badge color={"green"}>Yes</Badge> : <RequestAccess userModels={all_models_on_proxy} accessToken={accessToken} userID={userID}></RequestAccess>}
+                  {model.user_access ? (
+                    <Badge color={"green"}>Yes</Badge>
+                  ) : (
+                    <RequestAccess
+                      userModels={all_models_on_proxy}
+                      accessToken={accessToken}
+                      userID={userID}
+                    ></RequestAccess>
+                  )}
                 </TableCell>
 
                 <TableCell>{model.input_cost}</TableCell>
                 <TableCell>{model.output_cost}</TableCell>
                 <TableCell>{model.max_tokens}</TableCell>
 
 
               </TableRow>
             ))}
           </TableBody>
         </Table>
       </Card>
-      {
-        userRole === "Admin" && pendingRequests && pendingRequests.length > 0 ? (
+      {userRole === "Admin" &&
+      pendingRequests &&
+      pendingRequests.length > 0 ? (
         <Card>
           <Table>
             <TableHead>
               <Title>Pending Requests</Title>
               <TableRow>
-                <TableCell><Title>User ID</Title></TableCell>
-                <TableCell><Title>Requested Models</Title></TableCell>
-                <TableCell><Title>Justification</Title></TableCell>
-                <TableCell><Title>Justification</Title></TableCell>
+                <TableCell>
+                  <Title>User ID</Title>
+                </TableCell>
+                <TableCell>
+                  <Title>Requested Models</Title>
+                </TableCell>
+                <TableCell>
+                  <Title>Justification</Title>
+                </TableCell>
+                <TableCell>
+                  <Title>Justification</Title>
+                </TableCell>
               </TableRow>
             </TableHead>
             <TableBody>
               {pendingRequests.map((request: any) => (
                 <TableRow key={request.request_id}>
-                  <TableCell><p>{request.user_id}</p></TableCell>
-                  <TableCell><p>{request.models[0]}</p></TableCell>
-                  <TableCell><p>{request.justification}</p></TableCell>
-                  <TableCell><p>{request.user_id}</p></TableCell>
+                  <TableCell>
+                    <p>{request.user_id}</p>
+                  </TableCell>
+                  <TableCell>
+                    <p>{request.models[0]}</p>
+                  </TableCell>
+                  <TableCell>
+                    <p>{request.justification}</p>
+                  </TableCell>
+                  <TableCell>
+                    <p>{request.user_id}</p>
+                  </TableCell>
                   <Button>Approve</Button>
-                  <Button variant="secondary" className="ml-2">Deny</Button>
+                  <Button variant="secondary" className="ml-2">
+                    Deny
+                  </Button>
                 </TableRow>
               ))}
             </TableBody>
           </Table>
         </Card>
-      ) : null
-      }
+      ) : null}
       </Grid>
     </div>
   );
@@ -280,7 +280,7 @@ export const modelAvailableCall = async (
 
 export const keySpendLogsCall = async (accessToken: String, token: String) => {
   try {
-    const url = proxyBaseUrl ? `${proxyBaseUrl}/spend/logs` : `/spend/logs`;
+    const url = proxyBaseUrl ? `${proxyBaseUrl}/global/spend/logs` : `/global/spend/logs`;
     console.log("in keySpendLogsCall:", url);
     const response = await fetch(`${url}/?api_key=${token}`, {
       method: "GET",
@@ -404,13 +404,13 @@ export const adminTopKeysCall = async (accessToken: String) => {
   }
 };
 
-export const adminTopModelsCall = async (accessToken: String) => {
+export const adminTopEndUsersCall = async (accessToken: String) => {
   try {
     let url = proxyBaseUrl
-      ? `${proxyBaseUrl}/global/spend/models?limit=5`
-      : `/global/spend/models?limit=5`;
+      ? `${proxyBaseUrl}/global/spend/end_users`
+      : `/global/spend/end_users`;
 
-    message.info("Making spend models request");
+    message.info("Making top end users request");
     const response = await fetch(url, {
       method: "GET",
       headers: {
@@ -426,7 +426,37 @@ export const adminTopModelsCall = async (accessToken: String) => {
 
     const data = await response.json();
     console.log(data);
-    message.success("Spend Logs received");
+    message.success("Top End users received");
+    return data;
+  } catch (error) {
+    console.error("Failed to create key:", error);
+    throw error;
+  }
+};
+
+export const adminTopModelsCall = async (accessToken: String) => {
+  try {
+    let url = proxyBaseUrl
+      ? `${proxyBaseUrl}/global/spend/models?limit=5`
+      : `/global/spend/models?limit=5`;
+
+    message.info("Making top models request");
+    const response = await fetch(url, {
+      method: "GET",
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+        "Content-Type": "application/json",
+      },
+    });
+    if (!response.ok) {
+      const errorData = await response.text();
+      message.error(errorData);
+      throw new Error("Network response was not ok");
+    }
+
+    const data = await response.json();
+    console.log(data);
+    message.success("Top Models received");
     return data;
   } catch (error) {
     console.error("Failed to create key:", error);
@@ -718,3 +748,42 @@ export const userUpdateUserCall = async (
     throw error;
   }
 };
+
+
+export const PredictedSpendLogsCall = async (accessToken: string, requestData: any) => {
+  try {
+    let url = proxyBaseUrl
+      ? `${proxyBaseUrl}/global/predict/spend/logs`
+      : `/global/predict/spend/logs`;
+
+    //message.info("Predicting spend logs request");
+
+    const response = await fetch(url, {
+      method: "POST",
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify(
+        {
+          data: requestData
+        }
+      ),
+    });
+
+    if (!response.ok) {
+      const errorData = await response.text();
+      message.error(errorData);
+      throw new Error("Network response was not ok");
+    }
+
+    const data = await response.json();
+    console.log(data);
+    //message.success("Predicted Logs received");
+    return data;
+  } catch (error) {
+    console.error("Failed to create key:", error);
+    throw error;
+  }
+};
@@ -132,6 +132,7 @@ const UsagePage: React.FC<UsagePageProps> = ({
   const currentDate = new Date();
   const [keySpendData, setKeySpendData] = useState<any[]>([]);
   const [topKeys, setTopKeys] = useState<any[]>([]);
+  const [topModels, setTopModels] = useState<any[]>([]);
   const [topUsers, setTopUsers] = useState<any[]>([]);
 
   const firstDay = new Date(
@@ -175,7 +176,7 @@ const UsagePage: React.FC<UsagePageProps> = ({
      * If user is App Owner - use the normal spend logs call
      */
     console.log(`user role: ${userRole}`);
-    if (userRole == "Admin") {
+    if (userRole == "Admin" || userRole == "Admin Viewer") {
       const overall_spend = await adminSpendLogsCall(accessToken);
       setKeySpendData(overall_spend);
       const top_keys = await adminTopKeysCall(accessToken);
@@ -188,6 +189,11 @@ const UsagePage: React.FC<UsagePageProps> = ({
       }));
       setTopKeys(filtered_keys);
       const top_models = await adminTopModelsCall(accessToken);
+      const filtered_models = top_models.map((k: any) => ({
+        key: k["model"],
+        spend: k["total_spend"],
+      }));
+      setTopModels(filtered_models);
     } else if (userRole == "App Owner") {
       await userSpendLogsCall(
         accessToken,
@@ -242,7 +248,7 @@ const UsagePage: React.FC<UsagePageProps> = ({
             <Title>Monthly Spend</Title>
             <BarChart
               data={keySpendData}
-              index="startTime"
+              index="date"
               categories={["spend"]}
               colors={["blue"]}
               valueFormatter={valueFormatter}
@@ -285,6 +291,22 @@ const UsagePage: React.FC<UsagePageProps> = ({
               />
             </Card>
           </Col>
+          <Col numColSpan={1}>
+            <Card>
+              <Title>Top Models</Title>
+              <BarChart
+                className="mt-4 h-40"
+                data={topModels}
+                index="key"
+                categories={["spend"]}
+                colors={["blue"]}
+                yAxisWidth={200}
+                layout="vertical"
+                showXAxis={false}
+                showLegend={false}
+              />
+            </Card>
+          </Col>
         </Grid>
       </div>
     );
@@ -8,7 +8,7 @@ import ViewUserSpend from "./view_user_spend";
 import DashboardTeam from "./dashboard_default_team";
 import { useSearchParams, useRouter } from "next/navigation";
 import { jwtDecode } from "jwt-decode";
+import { Typography } from "antd";
 const isLocal = process.env.NODE_ENV === "development";
 console.log("isLocal:", isLocal);
 const proxyBaseUrl = isLocal ? "http://localhost:4000" : null;
@@ -73,6 +73,10 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
         return "App Owner";
       case "app_admin":
         return "Admin";
+      case "proxy_admin":
+        return "Admin";
+      case "proxy_admin_viewer":
+        return "Admin Viewer";
       case "app_user":
         return "App User";
       default:
@@ -180,6 +184,16 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
     setUserRole("App Owner");
   }
 
+  if (userRole && userRole == "Admin Viewer") {
+    const { Title, Paragraph } = Typography;
+    return (
+      <div>
+        <Title level={1}>Access Denied</Title>
+        <Paragraph>Ask your proxy admin for access to create keys</Paragraph>
+      </div>
+    );
+  }
+
   return (
     <div>
       <Grid numItems={1} className="gap-0 p-10 h-[75vh] w-full">
@@ -21,7 +21,7 @@ import {
   BarList,
   Metric,
 } from "@tremor/react";
-import { keySpendLogsCall } from "./networking";
+import { keySpendLogsCall, PredictedSpendLogsCall } from "./networking";
 
 interface ViewKeySpendReportProps {
   token: string;
@@ -48,6 +48,7 @@ const ViewKeySpendReport: React.FC<ViewKeySpendReportProps> = ({
   const [data, setData] = useState<{ day: string; spend: number }[] | null>(
     null
   );
+  const [predictedSpendString, setPredictedSpendString] = useState("");
   const [userData, setUserData] = useState<
     { name: string; value: number }[] | null
   >(null);
@@ -78,85 +79,25 @@ const ViewKeySpendReport: React.FC<ViewKeySpendReportProps> = ({
         (token = token)
       );
       console.log("Response:", response);
-      // loop through response
-      // get spend, startTime for each element, place in new array
-
-      const pricePerDay: Record<string, number> = (
-        Object.values(response) as ResponseValueType[]
-      ).reduce((acc: Record<string, number>, value) => {
-        const startTime = new Date(value.startTime);
-        const day = new Intl.DateTimeFormat("en-US", {
-          day: "2-digit",
-          month: "short",
-        }).format(startTime);
-
-        acc[day] = (acc[day] || 0) + value.spend;
-
-        return acc;
-      }, {});
-
-      // sort pricePerDay by day
-      // Convert object to array of key-value pairs
-      const pricePerDayArray = Object.entries(pricePerDay);
-
-      // Sort the array based on the date (key)
-      pricePerDayArray.sort(([aKey], [bKey]) => {
-        const dateA = new Date(aKey);
-        const dateB = new Date(bKey);
-        return dateA.getTime() - dateB.getTime();
-      });
-
-      // Convert the sorted array back to an object
-      const sortedPricePerDay = Object.fromEntries(pricePerDayArray);
-
-      console.log(sortedPricePerDay);
-
-      const pricePerUser: Record<string, number> = (
-        Object.values(response) as ResponseValueType[]
-      ).reduce((acc: Record<string, number>, value) => {
-        const user = value.user;
-        acc[user] = (acc[user] || 0) + value.spend;
-
-        return acc;
-      }, {});
-
-      console.log(pricePerDay);
-      console.log(pricePerUser);
-
-      const arrayBarChart = [];
-      // [
-      //   {
-      //     "day": "02 Feb",
-      //     "spend": pricePerDay["02 Feb"],
-      //   }
-      // ]
-      for (const [key, value] of Object.entries(sortedPricePerDay)) {
-        arrayBarChart.push({ day: key, spend: value });
-      }
-
-      // get 5 most expensive users
-      const sortedUsers = Object.entries(pricePerUser).sort(
-        (a, b) => b[1] - a[1]
-      );
-      const top5Users = sortedUsers.slice(0, 5);
-      const userChart = top5Users.map(([key, value]) => ({
-        name: key,
-        value: value,
-      }));
-
-      setData(arrayBarChart);
-      setUserData(userChart);
-      console.log("arrayBarChart:", arrayBarChart);
+      setData(response);
+
+      // predict spend based on response
+      const predictedSpend = await PredictedSpendLogsCall(accessToken, response);
+      console.log("Response2:", predictedSpend);
+
+      // append predictedSpend to data
+      const combinedData = [...response, ...predictedSpend.response];
+      setData(combinedData);
+      setPredictedSpendString(predictedSpend.predicted_spend)
+
+      console.log("Combined Data:", combinedData);
+      // setPredictedSpend(predictedSpend);
     } catch (error) {
       console.error("There was an error fetching the data", error);
-      // Optionally, update your UI to reflect the error state here as well
     }
   };
 
-  // useEffect(() => {
-  //   // Fetch data only when the token changes
-  //   fetchData();
-  // }, [token]); // Dependency array containing the 'token' variable
-
   if (!token) {
     return null;
@@ -164,12 +105,12 @@ const ViewKeySpendReport: React.FC<ViewKeySpendReportProps> = ({
 
   return (
     <div>
-      <Button className="mx-auto" onClick={showModal}>
+      <Button size = "xs" onClick={showModal}>
         View Spend Report
       </Button>
       <Modal
         visible={isModalVisible}
-        width={1000}
+        width={1400}
         onOk={handleOk}
         onCancel={handleCancel}
         footer={null}
@@ -177,25 +118,21 @@ const ViewKeySpendReport: React.FC<ViewKeySpendReportProps> = ({
         <Title style={{ textAlign: "left" }}>Key Name: {keyName}</Title>
 
         <Metric>Monthly Spend ${keySpend}</Metric>
+        <Title>{predictedSpendString}</Title>
 
         <Card className="mt-6 mb-6">
           {data && (
             <BarChart
               className="mt-6"
               data={data}
-              colors={["green"]}
-              index="day"
-              categories={["spend"]}
-              yAxisWidth={48}
+              colors={["blue", "amber"]}
+              index="date"
+              categories={["spend", "predicted_spend"]}
+              yAxisWidth={80}
             />
           )}
         </Card>
-        <Title className="mt-6">Top 5 Users Spend (USD)</Title>
-        <Card className="mb-6">
-          {userData && (
-            <BarList className="mt-6" data={userData} color="teal" />
-          )}
-        </Card>
       </Modal>
     </div>
   );
@@ -86,6 +86,7 @@ const ViewKeyTable: React.FC<ViewKeyTableProps> = ({
             <TableHeaderCell>Secret Key</TableHeaderCell>
             <TableHeaderCell>Spend (USD)</TableHeaderCell>
             <TableHeaderCell>Key Budget (USD)</TableHeaderCell>
+            <TableHeaderCell>Spend Report</TableHeaderCell>
             <TableHeaderCell>Team ID</TableHeaderCell>
             <TableHeaderCell>Metadata</TableHeaderCell>
             <TableHeaderCell>Models</TableHeaderCell>
@@ -122,6 +123,15 @@ const ViewKeyTable: React.FC<ViewKeyTableProps> = ({
                     <Text>Unlimited Budget</Text>
                   )}
                 </TableCell>
+                <TableCell>
+                  <ViewKeySpendReport
+                    token={item.token}
+                    accessToken={accessToken}
+                    keySpend={item.spend}
+                    keyBudget={item.max_budget}
+                    keyName={item.key_name}
+                  />
+                </TableCell>
                 <TableCell>
                   <Text>{item.team_id}</Text>
                 </TableCell>
@@ -152,15 +162,6 @@ const ViewKeyTable: React.FC<ViewKeyTableProps> = ({
                     size="sm"
                   />
                 </TableCell>
-                <TableCell>
-                  <ViewKeySpendReport
-                    token={item.token}
-                    accessToken={accessToken}
-                    keySpend={item.spend}
-                    keyBudget={item.max_budget}
-                    keyName={item.key_name}
-                  />
-                </TableCell>
               </TableRow>
             );
           })}
@@ -1,7 +1,24 @@
 import React, { useState, useEffect } from "react";
-import { Card, Title, Subtitle, Table, TableHead, TableRow, TableCell, TableBody, Metric, Grid } from "@tremor/react";
-import { userInfoCall } from "./networking";
-import { Badge, BadgeDelta, Button } from '@tremor/react';
+import {
+  Card,
+  Title,
+  Subtitle,
+  Table,
+  TableHead,
+  TableHeaderCell,
+  TableRow,
+  TableCell,
+  TableBody,
+  Tab,
+  TabGroup,
+  TabList,
+  TabPanels,
+  Metric,
+  Grid,
+  TabPanel,
+} from "@tremor/react";
+import { userInfoCall, adminTopEndUsersCall } from "./networking";
+import { Badge, BadgeDelta, Button } from "@tremor/react";
 import RequestAccess from "./request_model_access";
 import CreateUser from "./create_user_button";
 
@@ -18,9 +35,10 @@ const ViewUserDashboard: React.FC<ViewUserDashboardProps> = ({
   userRole,
   userID,
 }) => {
-  const [userData, setuserData] = useState<null | any[]>(null);
-  const [pendingRequests, setPendingRequests] = useState<any[]>([]);
+  const [userData, setUserData] = useState<null | any[]>(null);
+  const [endUsers, setEndUsers] = useState<null | any[]>(null);
+  const [currentPage, setCurrentPage] = useState(1);
+  const defaultPageSize = 25;
 
   useEffect(() => {
     if (!accessToken || !token || !userRole || !userID) {
@@ -29,18 +47,39 @@ const ViewUserDashboard: React.FC<ViewUserDashboardProps> = ({
     const fetchData = async () => {
       try {
         // Replace with your actual API call for model data
-        const userDataResponse = await userInfoCall(accessToken, null, userRole, true);
+        const userDataResponse = await userInfoCall(
+          accessToken,
+          null,
+          userRole,
+          true
+        );
         console.log("user data response:", userDataResponse);
-        setuserData(userDataResponse);
+        setUserData(userDataResponse);
 
       } catch (error) {
         console.error("There was an error fetching the model data", error);
       }
     };
 
-    if (accessToken && token && userRole && userID) {
+    if (accessToken && token && userRole && userID && !userData) {
       fetchData();
     }
+
+    const fetchEndUserSpend = async () => {
+      try {
+        const topEndUsers = await adminTopEndUsersCall(accessToken);
+        console.log("user data response:", topEndUsers);
+        setEndUsers(topEndUsers);
+      } catch (error) {
+        console.error("There was an error fetching the model data", error);
+      }
+    };
+    if (
+      userRole &&
+      (userRole == "Admin" || userRole == "Admin Viewer") &&
+      !endUsers
+    ) {
+      fetchEndUserSpend();
+    }
   }, [accessToken, token, userRole, userID]);
 
   if (!userData) {
@@ -51,40 +90,106 @@ const ViewUserDashboard: React.FC<ViewUserDashboardProps> = ({
     return <div>Loading...</div>;
   }
 
-  // when users click request access show pop up to allow them to request access
+  function renderPagination() {
+    if (!userData) return null;
+
+    const totalPages = Math.ceil(userData.length / defaultPageSize);
+    const startItem = (currentPage - 1) * defaultPageSize + 1;
+    const endItem = Math.min(currentPage * defaultPageSize, userData.length);
+
+    return (
+      <div className="flex justify-between items-center">
+        <div>
+          Showing {startItem} – {endItem} of {userData.length}
+        </div>
+        <div className="flex">
+          <button
+            className="bg-blue-500 hover:bg-blue-700 text-white font-bold py-2 px-4 rounded-l focus:outline-none"
+            disabled={currentPage === 1}
+            onClick={() => setCurrentPage(currentPage - 1)}
+          >
+            ← Prev
+          </button>
+          <button
+            className="bg-blue-500 hover:bg-blue-700 text-white font-bold py-2 px-4 rounded-r focus:outline-none"
+            disabled={currentPage === totalPages}
+            onClick={() => setCurrentPage(currentPage + 1)}
+          >
+            Next →
+          </button>
+        </div>
+      </div>
+    );
+  }
+
   return (
     <div style={{ width: "100%" }}>
       <Grid className="gap-2 p-10 h-[75vh] w-full">
-        <CreateUser
-          userID={userID}
-          accessToken={accessToken}
-        />
-        <Card>
+        <CreateUser userID={userID} accessToken={accessToken} />
+        <Card className="w-full mx-auto flex-auto overflow-y-auto max-h-[50vh] mb-4">
+          <TabGroup>
+            <TabList variant="line" defaultValue="1">
+              <Tab value="1">Key Owners</Tab>
+              <Tab value="2">End-Users</Tab>
+            </TabList>
+            <TabPanels>
+              <TabPanel>
           <Table className="mt-5">
             <TableHead>
               <TableRow>
-                <TableCell><Title>User ID </Title></TableCell>
-                <TableCell><Title>User Role</Title></TableCell>
-                <TableCell><Title>User Models</Title></TableCell>
-                <TableCell><Title>User Spend ($ USD)</Title></TableCell>
-                <TableCell><Title>User Max Budget ($ USD)</Title></TableCell>
+                <TableHeaderCell>User ID</TableHeaderCell>
+                <TableHeaderCell>User Role</TableHeaderCell>
+                <TableHeaderCell>User Models</TableHeaderCell>
+                <TableHeaderCell>User Spend ($ USD)</TableHeaderCell>
+                <TableHeaderCell>User Max Budget ($ USD)</TableHeaderCell>
               </TableRow>
             </TableHead>
             <TableBody>
               {userData.map((user: any) => (
                 <TableRow key={user.user_id}>
-                  <TableCell><Title>{user.user_id}</Title></TableCell>
-                  <TableCell><Title>{user.user_role ? user.user_role : "app_user"}</Title></TableCell>
-                  <TableCell><Title>{user.models && user.models.length > 0 ? user.models : "All Models"}</Title></TableCell>
-                  <TableCell><Title>{user.spend ? user.spend : 0}</Title></TableCell>
-                  <TableCell><Title>{user.max_budget ? user.max_budget : "Unlimited"}</Title></TableCell>
+                  <TableCell>{user.user_id}</TableCell>
+                  <TableCell>
+                    {user.user_role ? user.user_role : "app_owner"}
+                  </TableCell>
+                  <TableCell>
+                    {user.models && user.models.length > 0
+                      ? user.models
+                      : "All Models"}
+                  </TableCell>
+                  <TableCell>{user.spend ? user.spend : 0}</TableCell>
+                  <TableCell>
+                    {user.max_budget ? user.max_budget : "Unlimited"}
+                  </TableCell>
                 </TableRow>
               ))}
             </TableBody>
           </Table>
+              </TabPanel>
+              <TabPanel>
+                <Table>
+                  <TableHead>
+                    <TableRow>
+                      <TableHeaderCell>End User</TableHeaderCell>
+                      <TableHeaderCell>Spend</TableHeaderCell>
+                      <TableHeaderCell>Total Events</TableHeaderCell>
+                    </TableRow>
+                  </TableHead>
+
+                  <TableBody>
+                    {endUsers?.map((user: any, index: number) => (
+                      <TableRow key={index}>
+                        <TableCell>{user.end_user}</TableCell>
+                        <TableCell>{user.total_spend}</TableCell>
+                        <TableCell>{user.total_events}</TableCell>
+                      </TableRow>
+                    ))}
+                  </TableBody>
+                </Table>
+              </TabPanel>
+            </TabPanels>
+          </TabGroup>
         </Card>
+        {renderPagination()}
       </Grid>
     </div>
   );