Mirror of https://github.com/BerriAI/litellm.git — synced 2025-04-25 02:34:29 +00:00

Merge branch 'BerriAI:main' into peteski22/bug/get_optional_params

Commit 2ebeccb2eb — 103 changed files with 1302 additions and 307 deletions
@@ -16,6 +16,7 @@ spec:
         {{- toYaml . | nindent 8 }}
       {{- end }}
     spec:
+      serviceAccountName: {{ include "litellm.serviceAccountName" . }}
      containers:
        - name: prisma-migrations
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default (printf "main-%s" .Chart.AppVersion) }}"
@@ -3,7 +3,7 @@ import TabItem from '@theme/TabItem';

 # Using Audio Models

-How to send / receieve audio to a `/chat/completions` endpoint
+How to send / receive audio to a `/chat/completions` endpoint


 ## Audio Output from a model

@@ -3,7 +3,7 @@ import TabItem from '@theme/TabItem';

 # Using PDF Input

-How to send / receieve pdf's (other document types) to a `/chat/completions` endpoint
+How to send / receive pdf's (other document types) to a `/chat/completions` endpoint

 Works for:
 - Vertex AI models (Gemini + Anthropic)
@@ -194,7 +194,7 @@ Expected Response

 ## Explicitly specify image type

-If you have images without a mime-type, or if litellm is incorrectly inferring the mime type of your image (e.g. calling `gs://` url's with vertex ai), you can set this explicity via the `format` param.
+If you have images without a mime-type, or if litellm is incorrectly inferring the mime type of your image (e.g. calling `gs://` url's with vertex ai), you can set this explicitly via the `format` param.

 ```python
 "image_url": {
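For context on the doc change above, a minimal sketch of how the `format` override is passed on a completion call; the model name and `gs://` object path are illustrative placeholders.

```python
import litellm

# Explicitly tell litellm the image's mime type via "format", useful when the
# URL (e.g. a gs:// path) carries no extension to infer it from.
response = litellm.completion(
    model="vertex_ai/gemini-2.0-flash",  # assumption: any vision-capable model
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "Describe this image."},
                {
                    "type": "image_url",
                    "image_url": {
                        "url": "gs://my-bucket/my-image",  # hypothetical object path
                        "format": "image/jpeg",            # explicit mime type
                    },
                },
            ],
        }
    ],
)
print(response.choices[0].message.content)
```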
@@ -53,7 +53,7 @@ response = completion(

 ## Additional information in metadata

-You can send any additional information to Greenscale by using the `metadata` field in completion and `greenscale_` prefix. This can be useful for sending metadata about the request, such as the project and application name, customer_id, enviornment, or any other information you want to track usage. `greenscale_project` and `greenscale_application` are required fields.
+You can send any additional information to Greenscale by using the `metadata` field in completion and `greenscale_` prefix. This can be useful for sending metadata about the request, such as the project and application name, customer_id, environment, or any other information you want to track usage. `greenscale_project` and `greenscale_application` are required fields.

 ```python
 #openai call with additional metadata
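As a quick illustration of the paragraph above, a hedged sketch of passing `greenscale_`-prefixed metadata on a completion call; the field values are placeholders, and only `greenscale_project` and `greenscale_application` are stated as required.

```python
import litellm

# Extra request metadata forwarded to Greenscale via the "greenscale_" prefix;
# the values below are illustrative placeholders.
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello from the docs example"}],
    metadata={
        "greenscale_project": "acme-billing",             # required
        "greenscale_application": "invoice-summarizer",   # required
        "greenscale_customer_id": "cust_123",             # optional, assumption
    },
)
```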
@@ -185,7 +185,7 @@ curl --location --request POST 'http://0.0.0.0:4000/chat/completions' \
 * `trace_release` - Release for the trace, defaults to `None`
 * `trace_metadata` - Metadata for the trace, defaults to `None`
 * `trace_user_id` - User identifier for the trace, defaults to completion argument `user`
-* `tags` - Tags for the trace, defeaults to `None`
+* `tags` - Tags for the trace, defaults to `None`

 ##### Updatable Parameters on Continuation

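For reference, a minimal sketch (not an exhaustive list) of setting these trace parameters through the `metadata` dict on a completion call, assuming the Langfuse callback is enabled and `LANGFUSE_*` credentials are set; all values shown are placeholders.

```python
import litellm

litellm.success_callback = ["langfuse"]  # assumes LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY are set

# Trace-level fields are passed via metadata; the keys mirror the parameters
# listed above (trace_release, trace_metadata, trace_user_id, tags).
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hi 👋"}],
    metadata={
        "trace_release": "v0.9.1",              # placeholder release label
        "trace_metadata": {"feature": "chat"},  # arbitrary dict
        "trace_user_id": "user-123",            # overrides the completion `user` arg
        "tags": ["docs-example", "staging"],
    },
)
```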
@@ -222,7 +222,7 @@ curl http://localhost:4000/vertex-ai/v1/projects/${PROJECT_ID}/locations/us-cent

 LiteLLM Proxy Server supports two methods of authentication to Vertex AI:

-1. Pass Vertex Credetials client side to proxy server
+1. Pass Vertex Credentials client side to proxy server

 2. Set Vertex AI credentials on proxy server

@@ -1095,7 +1095,7 @@ response = completion(
 print(response.choices[0])
 ```
 </TabItem>
-<TabItem value="proxy" lable="PROXY">
+<TabItem value="proxy" label="PROXY">

 1. Add model to config

@@ -483,7 +483,7 @@ response.stream_to_file(speech_file_path)
 This is a walkthrough on how to use Azure Active Directory Tokens - Microsoft Entra ID to make `litellm.completion()` calls

 Step 1 - Download Azure CLI
-Installation instructons: https://learn.microsoft.com/en-us/cli/azure/install-azure-cli
+Installation instructions: https://learn.microsoft.com/en-us/cli/azure/install-azure-cli
 ```shell
 brew update && brew install azure-cli
 ```
@@ -655,7 +655,7 @@ import os

 os.environ["GEMINI_API_KEY"] = ".."

-tools = [{"googleSearchRetrieval": {}}] # 👈 ADD GOOGLE SEARCH
+tools = [{"googleSearch": {}}] # 👈 ADD GOOGLE SEARCH

 response = completion(
     model="gemini/gemini-2.0-flash",
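Putting the pieces of this hunk together, a self-contained version of the updated snippet; the `messages` argument is taken from the matching proxy example below, and everything after `model=` is an assumption based on the surrounding docs.

```python
import os
from litellm import completion

os.environ["GEMINI_API_KEY"] = ".."  # placeholder key

tools = [{"googleSearch": {}}]  # 👈 ADD GOOGLE SEARCH (new tool name)

response = completion(
    model="gemini/gemini-2.0-flash",
    messages=[{"role": "user", "content": "What is the weather in San Francisco?"}],
    tools=tools,
)
print(response.choices[0].message.content)
```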
@@ -691,7 +691,7 @@ curl -X POST 'http://0.0.0.0:4000/chat/completions' \
 -d '{
   "model": "gemini-2.0-flash",
   "messages": [{"role": "user", "content": "What is the weather in San Francisco?"}],
-  "tools": [{"googleSearchRetrieval": {}}]
+  "tools": [{"googleSearch": {}}]
   }
 '
 ```

@@ -364,7 +364,7 @@ from litellm import completion
 ## SETUP ENVIRONMENT
 # !gcloud auth application-default login - run this to add vertex credentials to your env

-tools = [{"googleSearchRetrieval": {}}] # 👈 ADD GOOGLE SEARCH
+tools = [{"googleSearch": {}}] # 👈 ADD GOOGLE SEARCH

 resp = litellm.completion(
     model="vertex_ai/gemini-1.0-pro-001",

@@ -391,7 +391,7 @@ client = OpenAI(
 response = client.chat.completions.create(
     model="gemini-pro",
     messages=[{"role": "user", "content": "Who won the world cup?"}],
-    tools=[{"googleSearchRetrieval": {}}],
+    tools=[{"googleSearch": {}}],
 )

 print(response)

@@ -410,7 +410,7 @@ curl http://localhost:4000/v1/chat/completions \
     ],
     "tools": [
       {
-        "googleSearchRetrieval": {}
+        "googleSearch": {}
       }
     ]
   }'

@@ -529,7 +529,7 @@ from litellm import completion

 # !gcloud auth application-default login - run this to add vertex credentials to your env

-tools = [{"googleSearchRetrieval": {"disable_attributon": False}}] # 👈 ADD GOOGLE SEARCH
+tools = [{"googleSearch": {"disable_attributon": False}}] # 👈 ADD GOOGLE SEARCH

 resp = litellm.completion(
     model="vertex_ai/gemini-1.0-pro-001",

@@ -692,7 +692,7 @@ curl http://0.0.0.0:4000/v1/chat/completions \

 ### **Context Caching**

-Use Vertex AI context caching is supported by calling provider api directly. (Unified Endpoint support comin soon.).
+Use Vertex AI context caching is supported by calling provider api directly. (Unified Endpoint support coming soon.).

 [**Go straight to provider**](../pass_through/vertex_ai.md#context-caching)
@@ -910,7 +910,7 @@ export VERTEXAI_PROJECT="my-test-project" # ONLY use if model project is differe


 ## Specifying Safety Settings
-In certain use-cases you may need to make calls to the models and pass [safety settigns](https://ai.google.dev/docs/safety_setting_gemini) different from the defaults. To do so, simple pass the `safety_settings` argument to `completion` or `acompletion`. For example:
+In certain use-cases you may need to make calls to the models and pass [safety settings](https://ai.google.dev/docs/safety_setting_gemini) different from the defaults. To do so, simple pass the `safety_settings` argument to `completion` or `acompletion`. For example:

 ### Set per model/request
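A minimal sketch of passing `safety_settings` on a single request, as the paragraph above describes; the category and threshold strings follow Google's safety-setting docs and the exact set available may vary by model version.

```python
from litellm import completion

# Per-request Gemini safety settings; values are illustrative and follow the
# linked safety_setting_gemini documentation.
response = completion(
    model="vertex_ai/gemini-1.0-pro-001",
    messages=[{"role": "user", "content": "Tell me a story about a brave knight"}],
    safety_settings=[
        {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
        {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_ONLY_HIGH"},
    ],
)
```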
@@ -2050,7 +2050,7 @@ response = completion(
 print(response.choices[0])
 ```
 </TabItem>
-<TabItem value="proxy" lable="PROXY">
+<TabItem value="proxy" label="PROXY">

 1. Add model to config

@@ -243,12 +243,12 @@ We allow you to pass a local image or a an http/https url of your image

 Set `UI_LOGO_PATH` on your env. We recommend using a hosted image, it's a lot easier to set up and configure / debug

-Exaple setting Hosted image
+Example setting Hosted image
 ```shell
 UI_LOGO_PATH="https://litellm-logo-aws-marketplace.s3.us-west-2.amazonaws.com/berriai-logo-github.png"
 ```

-Exaple setting a local image (on your container)
+Example setting a local image (on your container)
 ```shell
 UI_LOGO_PATH="ui_images/logo.jpg"
 ```
@@ -213,7 +213,7 @@ model_list:
 general_settings:
   master_key: sk-1234
   alerting: ["slack"]
-  alerting_threshold: 0.0001 # (Seconds) set an artifically low threshold for testing alerting
+  alerting_threshold: 0.0001 # (Seconds) set an artificially low threshold for testing alerting
   alert_to_webhook_url: {
     "llm_exceptions": "https://hooks.slack.com/services/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",
     "llm_too_slow": "https://hooks.slack.com/services/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",

@@ -247,7 +247,7 @@ model_list:
 general_settings:
   master_key: sk-1234
   alerting: ["slack"]
-  alerting_threshold: 0.0001 # (Seconds) set an artifically low threshold for testing alerting
+  alerting_threshold: 0.0001 # (Seconds) set an artificially low threshold for testing alerting
   alert_to_webhook_url: {
     "llm_exceptions": ["os.environ/SLACK_WEBHOOK_URL", "os.environ/SLACK_WEBHOOK_URL_2"],
     "llm_too_slow": ["https://webhook.site/7843a980-a494-4967-80fb-d502dbc16886", "https://webhook.site/28cfb179-f4fb-4408-8129-729ff55cf213"],
@@ -425,7 +425,7 @@ curl -X GET --location 'http://0.0.0.0:4000/health/services?service=webhook' \
 - `projected_exceeded_date` *str or null*: The date when the budget is projected to be exceeded, returned when 'soft_budget' is set for key (optional).
 - `projected_spend` *float or null*: The projected spend amount, returned when 'soft_budget' is set for key (optional).
 - `event` *Literal["budget_crossed", "threshold_crossed", "projected_limit_exceeded"]*: The type of event that triggered the webhook. Possible values are:
-  * "spend_tracked": Emitted whenver spend is tracked for a customer id.
+  * "spend_tracked": Emitted whenever spend is tracked for a customer id.
   * "budget_crossed": Indicates that the spend has exceeded the max budget.
   * "threshold_crossed": Indicates that spend has crossed a threshold (currently sent when 85% and 95% of budget is reached).
   * "projected_limit_exceeded": For "key" only - Indicates that the projected spend is expected to exceed the soft budget threshold.

@@ -480,7 +480,7 @@ LLM-related Alerts
 | `cooldown_deployment` | Alerts when a deployment is put into cooldown | ✅ |
 | `new_model_added` | Notifications when a new model is added to litellm proxy through /model/new| ✅ |
 | `outage_alerts` | Alerts when a specific LLM deployment is facing an outage | ✅ |
-| `region_outage_alerts` | Alerts when a specfic LLM region is facing an outage. Example us-east-1 | ✅ |
+| `region_outage_alerts` | Alerts when a specific LLM region is facing an outage. Example us-east-1 | ✅ |

 Budget and Spend Alerts
@@ -56,7 +56,7 @@ model_list:
       model: azure/<your_deployment_name>
       api_key: os.environ/AZURE_API_KEY
       api_base: os.environ/AZURE_API_BASE
-      api_version: os.envrion/AZURE_API_VERSION
+      api_version: os.environ/AZURE_API_VERSION
     model_info:
       input_cost_per_token: 0.000421 # 👈 ONLY to track cost per token
       output_cost_per_token: 0.000520 # 👈 ONLY to track cost per token

@@ -133,4 +133,4 @@ acompletion(

 If these keys are not present, LiteLLM will not use your custom pricing.

-If the problem persists, please file an issue on [GitHub](https://github.com/BerriAI/litellm/issues).
+If the problem persists, please file an issue on [GitHub](https://github.com/BerriAI/litellm/issues).
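For the SDK (non-proxy) path, a hedged sketch of the same custom-pricing idea, passing the two cost keys directly on the call; the parameter names mirror the `input_cost_per_token` / `output_cost_per_token` config keys shown above, and the deployment name is a placeholder.

```python
from litellm import completion

# Per-call custom pricing for cost tracking; both keys must be present,
# otherwise LiteLLM will not apply the custom pricing (per the note above).
response = completion(
    model="azure/<your_deployment_name>",
    messages=[{"role": "user", "content": "Hey, how's it going?"}],
    input_cost_per_token=0.000421,
    output_cost_per_token=0.000520,
)
# assumption: the computed cost is surfaced in the response's hidden params
print(response._hidden_params.get("response_cost"))
```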
@@ -19,7 +19,7 @@ LiteLLM writes `UPDATE` and `UPSERT` queries to the DB. When using 10+ instances

 ### Stage 1. Each instance writes updates to redis

-Each instance will accumlate the spend updates for a key, user, team, etc and write the updates to a redis queue.
+Each instance will accumulate the spend updates for a key, user, team, etc and write the updates to a redis queue.

 <Image img={require('../../img/deadlock_fix_1.png')} style={{ width: '900px', height: 'auto' }} />
 <p style={{textAlign: 'left', color: '#666'}}>

@@ -22,7 +22,7 @@ echo 'LITELLM_MASTER_KEY="sk-1234"' > .env

 # Add the litellm salt key - you cannot change this after adding a model
 # It is used to encrypt / decrypt your LLM API Key credentials
-# We recommned - https://1password.com/password-generator/
+# We recommend - https://1password.com/password-generator/
 # password generator to get a random hash for litellm salt key
 echo 'LITELLM_SALT_KEY="sk-1234"' >> .env

@@ -125,7 +125,7 @@ CMD ["--port", "4000", "--config", "config.yaml", "--detailed_debug"]

 ### Build from litellm `pip` package

-Follow these instructons to build a docker container from the litellm pip package. If your company has a strict requirement around security / building images you can follow these steps.
+Follow these instructions to build a docker container from the litellm pip package. If your company has a strict requirement around security / building images you can follow these steps.

 Dockerfile

@@ -999,7 +999,7 @@ services:
       - "4000:4000" # Map the container port to the host, change the host port if necessary
     volumes:
       - ./litellm-config.yaml:/app/config.yaml # Mount the local configuration file
-    # You can change the port or number of workers as per your requirements or pass any new supported CLI augument. Make sure the port passed here matches with the container port defined above in `ports` value
+    # You can change the port or number of workers as per your requirements or pass any new supported CLI argument. Make sure the port passed here matches with the container port defined above in `ports` value
     command: [ "--config", "/app/config.yaml", "--port", "4000", "--num_workers", "8" ]

     # ...rest of your docker-compose config if any
@@ -691,7 +691,7 @@ curl --request POST \
 <TabItem value="admin_only_routes" label="Test `admin_only_routes`">


-**Successfull Request**
+**Successful Request**

 ```shell
 curl --location 'http://0.0.0.0:4000/key/generate' \

@@ -729,7 +729,7 @@ curl --location 'http://0.0.0.0:4000/key/generate' \
 <TabItem value="allowed_routes" label="Test `allowed_routes`">


-**Successfull Request**
+**Successful Request**

 ```shell
 curl http://localhost:4000/chat/completions \

@@ -164,7 +164,7 @@ curl -i http://localhost:4000/v1/chat/completions \

 **Expected response**

-Your response headers will incude `x-litellm-applied-guardrails` with the guardrail applied
+Your response headers will include `x-litellm-applied-guardrails` with the guardrail applied

 ```
 x-litellm-applied-guardrails: aporia-pre-guard
@@ -277,7 +277,7 @@ Found under `kwargs["standard_logging_object"]`. This is a standard payload, log

 ## Langfuse

-We will use the `--config` to set `litellm.success_callback = ["langfuse"]` this will log all successfull LLM calls to langfuse. Make sure to set `LANGFUSE_PUBLIC_KEY` and `LANGFUSE_SECRET_KEY` in your environment
+We will use the `--config` to set `litellm.success_callback = ["langfuse"]` this will log all successful LLM calls to langfuse. Make sure to set `LANGFUSE_PUBLIC_KEY` and `LANGFUSE_SECRET_KEY` in your environment

 **Step 1** Install langfuse

@@ -534,15 +534,15 @@ print(response)

 Use this if you want to control which LiteLLM-specific fields are logged as tags by the LiteLLM proxy. By default LiteLLM Proxy logs no LiteLLM-specific fields

-| LiteLLM specific field | Description | Example Value |
-|------------------------|-------------------------------------------------------|------------------------------------------------|
-| `cache_hit` | Indicates whether a cache hit occured (True) or not (False) | `true`, `false` |
-| `cache_key` | The Cache key used for this request | `d2b758c****`|
-| `proxy_base_url` | The base URL for the proxy server, the value of env var `PROXY_BASE_URL` on your server | `https://proxy.example.com`|
-| `user_api_key_alias` | An alias for the LiteLLM Virtual Key.| `prod-app1` |
-| `user_api_key_user_id` | The unique ID associated with a user's API key. | `user_123`, `user_456` |
-| `user_api_key_user_email` | The email associated with a user's API key. | `user@example.com`, `admin@example.com` |
-| `user_api_key_team_alias` | An alias for a team associated with an API key. | `team_alpha`, `dev_team` |
+| LiteLLM specific field | Description | Example Value |
+|---------------------------|-----------------------------------------------------------------------------------------|------------------------------------------------|
+| `cache_hit` | Indicates whether a cache hit occurred (True) or not (False) | `true`, `false` |
+| `cache_key` | The Cache key used for this request | `d2b758c****` |
+| `proxy_base_url` | The base URL for the proxy server, the value of env var `PROXY_BASE_URL` on your server | `https://proxy.example.com` |
+| `user_api_key_alias` | An alias for the LiteLLM Virtual Key. | `prod-app1` |
+| `user_api_key_user_id` | The unique ID associated with a user's API key. | `user_123`, `user_456` |
+| `user_api_key_user_email` | The email associated with a user's API key. | `user@example.com`, `admin@example.com` |
+| `user_api_key_team_alias` | An alias for a team associated with an API key. | `team_alpha`, `dev_team` |


 **Usage**
@@ -1190,7 +1190,7 @@ We will use the `--config` to set

 - `litellm.success_callback = ["s3"]`

-This will log all successfull LLM calls to s3 Bucket
+This will log all successful LLM calls to s3 Bucket

 **Step 1** Set AWS Credentials in .env

@@ -1279,7 +1279,7 @@ Log LLM Logs to [Azure Data Lake Storage](https://learn.microsoft.com/en-us/azur

 | Property | Details |
 |----------|---------|
-| Description | Log LLM Input/Output to Azure Blob Storag (Bucket) |
+| Description | Log LLM Input/Output to Azure Blob Storage (Bucket) |
 | Azure Docs on Data Lake Storage | [Azure Data Lake Storage](https://learn.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-introduction) |


@@ -1360,7 +1360,7 @@ LiteLLM Supports logging to the following Datdog Integrations:
 <Tabs>
 <TabItem value="datadog" label="Datadog Logs">

-We will use the `--config` to set `litellm.callbacks = ["datadog"]` this will log all successfull LLM calls to DataDog
+We will use the `--config` to set `litellm.callbacks = ["datadog"]` this will log all successful LLM calls to DataDog

 **Step 1**: Create a `config.yaml` file and set `litellm_settings`: `success_callback`

@@ -1636,7 +1636,7 @@ class MyCustomHandler(CustomLogger):
         litellm_params = kwargs.get("litellm_params", {})
         metadata = litellm_params.get("metadata", {})    # headers passed to LiteLLM proxy, can be found here

-        # Acess Exceptions & Traceback
+        # Access Exceptions & Traceback
         exception_event = kwargs.get("exception", None)
         traceback_event = kwargs.get("traceback_exception", None)

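For orientation, a hedged sketch of the kind of failure hook the lines above sit in: a `CustomLogger` subclass reading the exception and traceback out of `kwargs`. The method name is assumed to be the async failure hook; the print is illustrative only.

```python
from litellm.integrations.custom_logger import CustomLogger


class MyCustomHandler(CustomLogger):
    async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time):
        # headers / request metadata forwarded by the LiteLLM proxy
        litellm_params = kwargs.get("litellm_params", {})
        metadata = litellm_params.get("metadata", {})

        # Access Exceptions & Traceback (same kwargs keys as in the hunk above)
        exception_event = kwargs.get("exception", None)
        traceback_event = kwargs.get("traceback_exception", None)
        print(f"model={kwargs.get('model')} failed: {exception_event}\n{traceback_event}")
```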
@@ -2205,7 +2205,7 @@ We will use the `--config` to set
 - `litellm.success_callback = ["dynamodb"]`
 - `litellm.dynamodb_table_name = "your-table-name"`

-This will log all successfull LLM calls to DynamoDB
+This will log all successful LLM calls to DynamoDB

 **Step 1** Set AWS Credentials in .env

@@ -2370,7 +2370,7 @@ litellm --test

 [Athina](https://athina.ai/) allows you to log LLM Input/Output for monitoring, analytics, and observability.

-We will use the `--config` to set `litellm.success_callback = ["athina"]` this will log all successfull LLM calls to athina
+We will use the `--config` to set `litellm.success_callback = ["athina"]` this will log all successful LLM calls to athina

 **Step 1** Set Athina API key
@@ -61,7 +61,7 @@ CMD ["--port", "4000", "--config", "./proxy_server_config.yaml"]

 ## 3. Use Redis 'port','host', 'password'. NOT 'redis_url'

-If you decide to use Redis, DO NOT use 'redis_url'. We recommend usig redis port, host, and password params.
+If you decide to use Redis, DO NOT use 'redis_url'. We recommend using redis port, host, and password params.

 `redis_url`is 80 RPS slower
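To illustrate the recommendation above on the SDK side, a hedged sketch of configuring the Router with separate Redis host/port/password params instead of a single `redis_url`; host and credentials are placeholders pulled from env vars.

```python
import os
from litellm import Router

# Pass discrete Redis connection params (recommended above) rather than redis_url.
router = Router(
    model_list=[
        {
            "model_name": "gpt-3.5-turbo",
            "litellm_params": {"model": "gpt-3.5-turbo", "api_key": os.environ["OPENAI_API_KEY"]},
        }
    ],
    redis_host=os.environ.get("REDIS_HOST", "localhost"),
    redis_port=int(os.environ.get("REDIS_PORT", "6379")),
    redis_password=os.environ.get("REDIS_PASSWORD"),
)
```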
@@ -169,7 +169,7 @@ If you plan on using the DB, set a salt key for encrypting/decrypting variables

 Do not change this after adding a model. It is used to encrypt / decrypt your LLM API Key credentials

-We recommned - https://1password.com/password-generator/ password generator to get a random hash for litellm salt key.
+We recommend - https://1password.com/password-generator/ password generator to get a random hash for litellm salt key.

 ```bash
 export LITELLM_SALT_KEY="sk-1234"

@@ -3,7 +3,7 @@
 Set temporary budget increase for a LiteLLM Virtual Key. Use this if you get asked to increase the budget for a key temporarily.


-| Heirarchy | Supported |
+| Hierarchy | Supported |
 |-----------|-----------|
 | LiteLLM Virtual Key | ✅ |
 | User | ❌ |

@@ -4,7 +4,7 @@ import TabItem from '@theme/TabItem';

 # Adding LLM Credentials

-You can add LLM provider credentials on the UI. Once you add credentials you can re-use them when adding new models
+You can add LLM provider credentials on the UI. Once you add credentials you can reuse them when adding new models

 ## Add a credential + model
@@ -23,7 +23,7 @@ Requirements:
 - ** Set on config.yaml** set your master key under `general_settings:master_key`, example below
 - ** Set env variable** set `LITELLM_MASTER_KEY`

-(the proxy Dockerfile checks if the `DATABASE_URL` is set and then intializes the DB connection)
+(the proxy Dockerfile checks if the `DATABASE_URL` is set and then initializes the DB connection)

 ```shell
 export DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<dbname>
@@ -333,7 +333,7 @@ curl http://localhost:4000/v1/chat/completions \

 **Expected Response**

-Expect to see a successfull response from the litellm proxy since the key passed in `X-Litellm-Key` is valid
+Expect to see a successful response from the litellm proxy since the key passed in `X-Litellm-Key` is valid
 ```shell
 {"id":"chatcmpl-f9b2b79a7c30477ab93cd0e717d1773e","choices":[{"finish_reason":"stop","index":0,"message":{"content":"\n\nHello there, how may I assist you today?","role":"assistant","tool_calls":null,"function_call":null}}],"created":1677652288,"model":"gpt-3.5-turbo-0125","object":"chat.completion","system_fingerprint":"fp_44709d6fcb","usage":{"completion_tokens":12,"prompt_tokens":9,"total_tokens":21}
 ```
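A hedged sketch of sending the virtual key in the `X-Litellm-Key` header from Python, using the OpenAI SDK's `default_headers`; the key value and base URL are placeholders, and it assumes the proxy is configured to read the key from that header as in the curl example above.

```python
import openai

client = openai.OpenAI(
    api_key="anything",  # placeholder; the proxy reads the key from X-Litellm-Key here
    base_url="http://localhost:4000",
    default_headers={"X-Litellm-Key": "sk-1234"},  # placeholder virtual key
)

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)
```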
@@ -994,16 +994,16 @@ litellm --health

 ## Logging Proxy Input/Output - OpenTelemetry

-### Step 1 Start OpenTelemetry Collecter Docker Container
+### Step 1 Start OpenTelemetry Collector Docker Container
 This container sends logs to your selected destination

-#### Install OpenTelemetry Collecter Docker Image
+#### Install OpenTelemetry Collector Docker Image
 ```shell
 docker pull otel/opentelemetry-collector:0.90.0
 docker run -p 127.0.0.1:4317:4317 -p 127.0.0.1:55679:55679 otel/opentelemetry-collector:0.90.0
 ```

-#### Set Destination paths on OpenTelemetry Collecter
+#### Set Destination paths on OpenTelemetry Collector

 Here's the OpenTelemetry yaml config to use with Elastic Search
 ```yaml

@@ -1077,7 +1077,7 @@ general_settings:
 LiteLLM will read the `OTEL_ENDPOINT` environment variable to send data to your OTEL collector

 ```python
-os.environ['OTEL_ENDPOINT'] # defauls to 127.0.0.1:4317 if not provided
+os.environ['OTEL_ENDPOINT'] # defaults to 127.0.0.1:4317 if not provided
 ```

 #### Start LiteLLM Proxy

@@ -1101,8 +1101,8 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \
 ```


-#### Test & View Logs on OpenTelemetry Collecter
-On successfull logging you should be able to see this log on your `OpenTelemetry Collecter` Docker Container
+#### Test & View Logs on OpenTelemetry Collector
+On successful logging you should be able to see this log on your `OpenTelemetry Collector` Docker Container
 ```shell
 Events:
 SpanEvent #0

@@ -1149,7 +1149,7 @@ Here's the log view on Elastic Search. You can see the request `input`, `output`
 <Image img={require('../img/elastic_otel.png')} />

 ## Logging Proxy Input/Output - Langfuse
-We will use the `--config` to set `litellm.success_callback = ["langfuse"]` this will log all successfull LLM calls to langfuse
+We will use the `--config` to set `litellm.success_callback = ["langfuse"]` this will log all successful LLM calls to langfuse

 **Step 1** Install langfuse
@@ -117,7 +117,7 @@ response = completion("command-nightly", messages)
 """


-# qustions/logs you want to run the LLM on
+# questions/logs you want to run the LLM on
 questions = [
     "what is litellm?",
     "why should I use LiteLLM",

@@ -30,7 +30,7 @@ def inference(message, history):
             yield partial_message
     except Exception as e:
         print("Exception encountered:", str(e))
-        yield f"An Error occured please 'Clear' the error and try your question again"
+        yield f"An Error occurred please 'Clear' the error and try your question again"
 ```

 ### Define Chat Interface
@@ -57,6 +57,7 @@ from litellm.llms.vertex_ai.image_generation.cost_calculator import (
 from litellm.responses.utils import ResponseAPILoggingUtils
 from litellm.types.llms.openai import (
     HttpxBinaryResponseContent,
+    ImageGenerationRequestQuality,
     OpenAIRealtimeStreamList,
     OpenAIRealtimeStreamResponseBaseObject,
     OpenAIRealtimeStreamSessionEvents,

@@ -642,9 +643,9 @@ def completion_cost(  # noqa: PLR0915
         or isinstance(completion_response, dict)
     ):  # tts returns a custom class
         if isinstance(completion_response, dict):
-            usage_obj: Optional[
-                Union[dict, Usage]
-            ] = completion_response.get("usage", {})
+            usage_obj: Optional[Union[dict, Usage]] = (
+                completion_response.get("usage", {})
+            )
         else:
             usage_obj = getattr(completion_response, "usage", {})
         if isinstance(usage_obj, BaseModel) and not _is_known_usage_objects(

@@ -913,7 +914,7 @@ def completion_cost(  # noqa: PLR0915


 def get_response_cost_from_hidden_params(
-    hidden_params: Union[dict, BaseModel]
+    hidden_params: Union[dict, BaseModel],
 ) -> Optional[float]:
     if isinstance(hidden_params, BaseModel):
         _hidden_params_dict = hidden_params.model_dump()
@@ -1101,30 +1102,37 @@ def default_image_cost_calculator(
         f"{quality}/{base_model_name}" if quality else base_model_name
     )

+    # gpt-image-1 models use low, medium, high quality. If user did not specify quality, use medium fot gpt-image-1 model family
+    model_name_with_v2_quality = (
+        f"{ImageGenerationRequestQuality.MEDIUM.value}/{base_model_name}"
+    )
+
     verbose_logger.debug(
         f"Looking up cost for models: {model_name_with_quality}, {base_model_name}"
     )

-    # Try model with quality first, fall back to base model name
-    if model_name_with_quality in litellm.model_cost:
-        cost_info = litellm.model_cost[model_name_with_quality]
-    elif base_model_name in litellm.model_cost:
-        cost_info = litellm.model_cost[base_model_name]
-    else:
-        # Try without provider prefix
-        model_without_provider = f"{size_str}/{model.split('/')[-1]}"
-        model_with_quality_without_provider = (
-            f"{quality}/{model_without_provider}" if quality else model_without_provider
-        )
+    model_without_provider = f"{size_str}/{model.split('/')[-1]}"
+    model_with_quality_without_provider = (
+        f"{quality}/{model_without_provider}" if quality else model_without_provider
+    )

-        if model_with_quality_without_provider in litellm.model_cost:
-            cost_info = litellm.model_cost[model_with_quality_without_provider]
-        elif model_without_provider in litellm.model_cost:
-            cost_info = litellm.model_cost[model_without_provider]
-        else:
-            raise Exception(
-                f"Model not found in cost map. Tried {model_name_with_quality}, {base_model_name}, {model_with_quality_without_provider}, and {model_without_provider}"
-            )
+    # Try model with quality first, fall back to base model name
+    cost_info: Optional[dict] = None
+    models_to_check = [
+        model_name_with_quality,
+        base_model_name,
+        model_name_with_v2_quality,
+        model_with_quality_without_provider,
+        model_without_provider,
+    ]
+    for model in models_to_check:
+        if model in litellm.model_cost:
+            cost_info = litellm.model_cost[model]
+            break
+    if cost_info is None:
+        raise Exception(
+            f"Model not found in cost map. Tried checking {models_to_check}"
+        )

     return cost_info["input_cost_per_pixel"] * height * width * n

@@ -311,6 +311,9 @@ def exception_type(  # type: ignore # noqa: PLR0915
                 elif (
                     "invalid_request_error" in error_str
                     and "content_policy_violation" in error_str
+                ) or (
+                    "Invalid prompt" in error_str
+                    and "violating our usage policy" in error_str
                 ):
                     exception_mapping_worked = True
                     raise ContentPolicyViolationError(
@@ -95,6 +95,35 @@ class AzureOpenAIResponsesAPIConfig(OpenAIResponsesAPIConfig):
     #########################################################
     ########## DELETE RESPONSE API TRANSFORMATION ##############
     #########################################################
+    def _construct_url_for_response_id_in_path(
+        self, api_base: str, response_id: str
+    ) -> str:
+        """
+        Constructs a URL for the API request with the response_id in the path.
+        """
+        from urllib.parse import urlparse, urlunparse
+
+        # Parse the URL to separate its components
+        parsed_url = urlparse(api_base)
+
+        # Insert the response_id at the end of the path component
+        # Remove trailing slash if present to avoid double slashes
+        path = parsed_url.path.rstrip("/")
+        new_path = f"{path}/{response_id}"
+
+        # Reconstruct the URL with all original components but with the modified path
+        constructed_url = urlunparse(
+            (
+                parsed_url.scheme,  # http, https
+                parsed_url.netloc,  # domain name, port
+                new_path,  # path with response_id added
+                parsed_url.params,  # parameters
+                parsed_url.query,  # query string
+                parsed_url.fragment,  # fragment
+            )
+        )
+        return constructed_url
+
     def transform_delete_response_api_request(
         self,
         response_id: str,

@@ -111,28 +140,33 @@ class AzureOpenAIResponsesAPIConfig(OpenAIResponsesAPIConfig):
         This function handles URLs with query parameters by inserting the response_id
         at the correct location (before any query parameters).
         """
-        from urllib.parse import urlparse, urlunparse
-
-        # Parse the URL to separate its components
-        parsed_url = urlparse(api_base)
-
-        # Insert the response_id at the end of the path component
-        # Remove trailing slash if present to avoid double slashes
-        path = parsed_url.path.rstrip("/")
-        new_path = f"{path}/{response_id}"
-
-        # Reconstruct the URL with all original components but with the modified path
-        delete_url = urlunparse(
-            (
-                parsed_url.scheme,  # http, https
-                parsed_url.netloc,  # domain name, port
-                new_path,  # path with response_id added
-                parsed_url.params,  # parameters
-                parsed_url.query,  # query string
-                parsed_url.fragment,  # fragment
-            )
+        delete_url = self._construct_url_for_response_id_in_path(
+            api_base=api_base, response_id=response_id
         )

         data: Dict = {}
         verbose_logger.debug(f"delete response url={delete_url}")
         return delete_url, data

+    #########################################################
+    ########## GET RESPONSE API TRANSFORMATION ###############
+    #########################################################
+    def transform_get_response_api_request(
+        self,
+        response_id: str,
+        api_base: str,
+        litellm_params: GenericLiteLLMParams,
+        headers: dict,
+    ) -> Tuple[str, Dict]:
+        """
+        Transform the get response API request into a URL and data
+
+        OpenAI API expects the following request
+        - GET /v1/responses/{response_id}
+        """
+        get_url = self._construct_url_for_response_id_in_path(
+            api_base=api_base, response_id=response_id
+        )
+        data: Dict = {}
+        verbose_logger.debug(f"get response url={get_url}")
+        return get_url, data
@@ -141,9 +141,34 @@ class BaseResponsesAPIConfig(ABC):
         pass

     #########################################################
-    ########## END DELETE RESPONSE API TRANSFORMATION ##########
+    ########## END DELETE RESPONSE API TRANSFORMATION #######
     #########################################################

+    #########################################################
+    ########## GET RESPONSE API TRANSFORMATION ###############
+    #########################################################
+    @abstractmethod
+    def transform_get_response_api_request(
+        self,
+        response_id: str,
+        api_base: str,
+        litellm_params: GenericLiteLLMParams,
+        headers: dict,
+    ) -> Tuple[str, Dict]:
+        pass
+
+    @abstractmethod
+    def transform_get_response_api_response(
+        self,
+        raw_response: httpx.Response,
+        logging_obj: LiteLLMLoggingObj,
+    ) -> ResponsesAPIResponse:
+        pass
+
+    #########################################################
+    ########## END GET RESPONSE API TRANSFORMATION ##########
+    #########################################################
+
     def get_error_class(
         self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers]
     ) -> BaseLLMException:
@@ -107,6 +107,15 @@ class AmazonConverseConfig(BaseConfig):
             "response_format",
         ]

+        if (
+            "arn" in model
+        ):  # we can't infer the model from the arn, so just add all params
+            supported_params.append("tools")
+            supported_params.append("tool_choice")
+            supported_params.append("thinking")
+            supported_params.append("reasoning_effort")
+            return supported_params
+
         ## Filter out 'cross-region' from model name
         base_model = BedrockModelInfo.get_base_model(model)

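A hedged illustration of what this early-return means in practice, assuming the branch lives in `get_supported_openai_params` (the method name and import path are inferred from context and may differ across litellm versions); the ARN is a placeholder.

```python
# Assumption: AmazonConverseConfig is importable from the converse transformation module.
from litellm.llms.bedrock.chat.converse_transformation import AmazonConverseConfig

config = AmazonConverseConfig()

# For an inference-profile ARN the base model can't be inferred, so tool /
# thinking params are assumed supported (per the added branch above).
params = config.get_supported_openai_params(
    model="arn:aws:bedrock:us-east-1:111122223333:application-inference-profile/example"
)
print("tools" in params, "thinking" in params, "reasoning_effort" in params)
```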
@@ -1426,6 +1426,162 @@ class BaseLLMHTTPHandler:
             logging_obj=logging_obj,
         )

+    def get_responses(
+        self,
+        response_id: str,
+        responses_api_provider_config: BaseResponsesAPIConfig,
+        litellm_params: GenericLiteLLMParams,
+        logging_obj: LiteLLMLoggingObj,
+        custom_llm_provider: Optional[str] = None,
+        extra_headers: Optional[Dict[str, Any]] = None,
+        extra_body: Optional[Dict[str, Any]] = None,
+        timeout: Optional[Union[float, httpx.Timeout]] = None,
+        client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None,
+        _is_async: bool = False,
+    ) -> Union[ResponsesAPIResponse, Coroutine[Any, Any, ResponsesAPIResponse]]:
+        """
+        Get a response by ID
+        Uses GET /v1/responses/{response_id} endpoint in the responses API
+        """
+        if _is_async:
+            return self.async_get_responses(
+                response_id=response_id,
+                responses_api_provider_config=responses_api_provider_config,
+                litellm_params=litellm_params,
+                logging_obj=logging_obj,
+                custom_llm_provider=custom_llm_provider,
+                extra_headers=extra_headers,
+                extra_body=extra_body,
+                timeout=timeout,
+                client=client,
+            )
+
+        if client is None or not isinstance(client, HTTPHandler):
+            sync_httpx_client = _get_httpx_client(
+                params={"ssl_verify": litellm_params.get("ssl_verify", None)}
+            )
+        else:
+            sync_httpx_client = client
+
+        headers = responses_api_provider_config.validate_environment(
+            api_key=litellm_params.api_key,
+            headers=extra_headers or {},
+            model="None",
+        )
+
+        if extra_headers:
+            headers.update(extra_headers)
+
+        api_base = responses_api_provider_config.get_complete_url(
+            api_base=litellm_params.api_base,
+            litellm_params=dict(litellm_params),
+        )
+
+        url, data = responses_api_provider_config.transform_get_response_api_request(
+            response_id=response_id,
+            api_base=api_base,
+            litellm_params=litellm_params,
+            headers=headers,
+        )
+
+        ## LOGGING
+        logging_obj.pre_call(
+            input="",
+            api_key="",
+            additional_args={
+                "complete_input_dict": data,
+                "api_base": api_base,
+                "headers": headers,
+            },
+        )
+
+        try:
+            response = sync_httpx_client.get(
+                url=url, headers=headers, params=data
+            )
+        except Exception as e:
+            raise self._handle_error(
+                e=e,
+                provider_config=responses_api_provider_config,
+            )
+
+        return responses_api_provider_config.transform_get_response_api_response(
+            raw_response=response,
+            logging_obj=logging_obj,
+        )
+
+    async def async_get_responses(
+        self,
+        response_id: str,
+        responses_api_provider_config: BaseResponsesAPIConfig,
+        litellm_params: GenericLiteLLMParams,
+        logging_obj: LiteLLMLoggingObj,
+        custom_llm_provider: Optional[str] = None,
+        extra_headers: Optional[Dict[str, Any]] = None,
+        extra_body: Optional[Dict[str, Any]] = None,
+        timeout: Optional[Union[float, httpx.Timeout]] = None,
+        client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None,
+    ) -> ResponsesAPIResponse:
+        """
+        Async version of get_responses
+        """
+        if client is None or not isinstance(client, AsyncHTTPHandler):
+            async_httpx_client = get_async_httpx_client(
+                llm_provider=litellm.LlmProviders(custom_llm_provider),
+                params={"ssl_verify": litellm_params.get("ssl_verify", None)},
+            )
+        else:
+            async_httpx_client = client
+
+        headers = responses_api_provider_config.validate_environment(
+            api_key=litellm_params.api_key,
+            headers=extra_headers or {},
+            model="None",
+        )
+
+        if extra_headers:
+            headers.update(extra_headers)
+
+        api_base = responses_api_provider_config.get_complete_url(
+            api_base=litellm_params.api_base,
+            litellm_params=dict(litellm_params),
+        )
+
+        url, data = responses_api_provider_config.transform_get_response_api_request(
+            response_id=response_id,
+            api_base=api_base,
+            litellm_params=litellm_params,
+            headers=headers,
+        )
+
+        ## LOGGING
+        logging_obj.pre_call(
+            input="",
+            api_key="",
+            additional_args={
+                "complete_input_dict": data,
+                "api_base": api_base,
+                "headers": headers,
+            },
+        )
+
+        try:
+            response = await async_httpx_client.get(
+                url=url, headers=headers, params=data
+            )
+        except Exception as e:
+            verbose_logger.exception(f"Error retrieving response: {e}")
+            raise self._handle_error(
+                e=e,
+                provider_config=responses_api_provider_config,
+            )
+
+        return responses_api_provider_config.transform_get_response_api_response(
+            raw_response=response,
+            logging_obj=logging_obj,
+        )
+
     def create_file(
         self,
         create_file_data: CreateFileRequest,
@@ -250,3 +250,39 @@ class OpenAIResponsesAPIConfig(BaseResponsesAPIConfig):
                 message=raw_response.text, status_code=raw_response.status_code
             )
         return DeleteResponseResult(**raw_response_json)
+
+    #########################################################
+    ########## GET RESPONSE API TRANSFORMATION ###############
+    #########################################################
+    def transform_get_response_api_request(
+        self,
+        response_id: str,
+        api_base: str,
+        litellm_params: GenericLiteLLMParams,
+        headers: dict,
+    ) -> Tuple[str, Dict]:
+        """
+        Transform the get response API request into a URL and data
+
+        OpenAI API expects the following request
+        - GET /v1/responses/{response_id}
+        """
+        url = f"{api_base}/{response_id}"
+        data: Dict = {}
+        return url, data
+
+    def transform_get_response_api_response(
+        self,
+        raw_response: httpx.Response,
+        logging_obj: LiteLLMLoggingObj,
+    ) -> ResponsesAPIResponse:
+        """
+        Transform the get response API response into a ResponsesAPIResponse
+        """
+        try:
+            raw_response_json = raw_response.json()
+        except Exception:
+            raise OpenAIError(
+                message=raw_response.text, status_code=raw_response.status_code
+            )
+        return ResponsesAPIResponse(**raw_response_json)
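To see what the new GET transformation produces, a hedged sketch exercising it directly; the module paths, the `api_base` value, and the response id are assumptions, and real call sites go through `BaseLLMHTTPHandler.get_responses` rather than calling the config directly.

```python
# Assumption: module paths as of this commit; they may differ across versions.
from litellm.llms.openai.responses.transformation import OpenAIResponsesAPIConfig
from litellm.types.router import GenericLiteLLMParams

config = OpenAIResponsesAPIConfig()

# The request transform simply appends the response_id to the API base,
# matching "GET /v1/responses/{response_id}" described above.
url, data = config.transform_get_response_api_request(
    response_id="resp_abc123",                       # placeholder ID
    api_base="https://api.openai.com/v1/responses",  # assumed complete URL
    litellm_params=GenericLiteLLMParams(),
    headers={},
)
print(url)  # -> https://api.openai.com/v1/responses/resp_abc123
```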
@@ -182,6 +182,7 @@ from .types.llms.openai import (
     ChatCompletionPredictionContentParam,
     ChatCompletionUserMessage,
     HttpxBinaryResponseContent,
+    ImageGenerationRequestQuality,
 )
 from .types.utils import (
     LITELLM_IMAGE_VARIATION_PROVIDERS,

@@ -2688,9 +2689,9 @@ def completion(  # type: ignore # noqa: PLR0915
                     "aws_region_name" not in optional_params
                     or optional_params["aws_region_name"] is None
                 ):
-                    optional_params[
-                        "aws_region_name"
-                    ] = aws_bedrock_client.meta.region_name
+                    optional_params["aws_region_name"] = (
+                        aws_bedrock_client.meta.region_name
+                    )

             bedrock_route = BedrockModelInfo.get_bedrock_route(model)
             if bedrock_route == "converse":

@@ -4412,9 +4413,9 @@ def adapter_completion(
     new_kwargs = translation_obj.translate_completion_input_params(kwargs=kwargs)

     response: Union[ModelResponse, CustomStreamWrapper] = completion(**new_kwargs)  # type: ignore
-    translated_response: Optional[
-        Union[BaseModel, AdapterCompletionStreamWrapper]
-    ] = None
+    translated_response: Optional[Union[BaseModel, AdapterCompletionStreamWrapper]] = (
+        None
+    )
     if isinstance(response, ModelResponse):
         translated_response = translation_obj.translate_completion_output_params(
             response=response

@@ -4567,7 +4568,7 @@ def image_generation(  # noqa: PLR0915
     prompt: str,
     model: Optional[str] = None,
     n: Optional[int] = None,
-    quality: Optional[str] = None,
+    quality: Optional[Union[str, ImageGenerationRequestQuality]] = None,
     response_format: Optional[str] = None,
     size: Optional[str] = None,
     style: Optional[str] = None,

@@ -5834,9 +5835,9 @@ def stream_chunk_builder(  # noqa: PLR0915
         ]

         if len(content_chunks) > 0:
-            response["choices"][0]["message"][
-                "content"
-            ] = processor.get_combined_content(content_chunks)
+            response["choices"][0]["message"]["content"] = (
+                processor.get_combined_content(content_chunks)
+            )

         reasoning_chunks = [
             chunk

@@ -5847,9 +5848,9 @@ def stream_chunk_builder(  # noqa: PLR0915
         ]

         if len(reasoning_chunks) > 0:
-            response["choices"][0]["message"][
-                "reasoning_content"
-            ] = processor.get_combined_reasoning_content(reasoning_chunks)
+            response["choices"][0]["message"]["reasoning_content"] = (
+                processor.get_combined_reasoning_content(reasoning_chunks)
+            )

         audio_chunks = [
             chunk
@@ -1 +0,0 @@
-Subproject commit bf0485467c343957ba5c217db777f407b2e65453

File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
(Regenerated minified Next.js/webpack UI chunk: module id 35677 replaced by 96443; remaining contents unchanged.)

@@ -1 +1 @@
(Regenerated minified Next.js/webpack UI chunk: module id 56668 replaced by 21024; remaining contents unchanged.)

@@ -0,0 +1 @@
(New minified webpack chunk for the invitation sign-up page, module id 8672.)

@@ -1 +0,0 @@
(Removed previous minified webpack chunk for the invitation sign-up page, module id 23781.)

File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long

@@ -1 +1 @@
(Regenerated webpack runtime chunk: module id 35618 replaced by 10264.)

@@ -1 +1 @@
(Regenerated static HTML for the LiteLLM Dashboard page: referenced chunk filenames and the Next.js buildId updated from fzhvjOFL6KeNsWYrLD4ya to sh3mKTgIKifNl8lsgZ675.)
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],\"notFoundStyles\":[]}]}]}]],null],null],\"couldBeIntercepted\":false,\"initialHead\":[null,\"$La\"],\"globalErrorComponent\":\"$b\",\"missingSlots\":\"$Wc\"}]\n"])</script><script>self.__next_f.push([1,"a:[[\"$\",\"meta\",\"0\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}],[\"$\",\"meta\",\"1\",{\"charSet\":\"utf-8\"}],[\"$\",\"title\",\"2\",{\"children\":\"LiteLLM Dashboard\"}],[\"$\",\"meta\",\"3\",{\"name\":\"description\",\"content\":\"LiteLLM Proxy Admin UI\"}],[\"$\",\"link\",\"4\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"meta\",\"5\",{\"name\":\"next-size-adjust\"}]]\n5:null\n"])</script></body></html>
|
|
@ -1,7 +1,7 @@
|
|||
2:I[19107,[],"ClientPageRoot"]
|
||||
3:I[76737,["665","static/chunks/3014691f-b7b79b78e27792f3.js","990","static/chunks/13b76428-ebdf3012af0e4489.js","42","static/chunks/42-69f5b4e6a9942a9f.js","261","static/chunks/261-ee7f0f1f1c8c22a0.js","899","static/chunks/899-57685cedd1dcbc78.js","466","static/chunks/466-65538e7f331af98e.js","250","static/chunks/250-7d480872c0e251dc.js","699","static/chunks/699-2176ba2273e4676d.js","931","static/chunks/app/page-36914b80c40b5032.js"],"default",1]
|
||||
3:I[69451,["665","static/chunks/3014691f-b7b79b78e27792f3.js","990","static/chunks/13b76428-ebdf3012af0e4489.js","42","static/chunks/42-014374badc35fe9b.js","261","static/chunks/261-92d8946249b3296e.js","899","static/chunks/899-54ea329f41297bf0.js","978","static/chunks/978-3e0bd2034b623309.js","250","static/chunks/250-e4cc2ceb9ff1c37a.js","699","static/chunks/699-f4066c747670f979.js","931","static/chunks/app/page-9bd76bfe1ce0a80a.js"],"default",1]
|
||||
4:I[4707,[],""]
|
||||
5:I[36423,[],""]
|
||||
0:["fzhvjOFL6KeNsWYrLD4ya",[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
|
||||
0:["sh3mKTgIKifNl8lsgZ675",[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
|
||||
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
|
||||
1:null
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
2:I[19107,[],"ClientPageRoot"]
|
||||
3:I[52829,["42","static/chunks/42-69f5b4e6a9942a9f.js","261","static/chunks/261-ee7f0f1f1c8c22a0.js","250","static/chunks/250-7d480872c0e251dc.js","699","static/chunks/699-2176ba2273e4676d.js","418","static/chunks/app/model_hub/page-a965e43ba9638156.js"],"default",1]
|
||||
3:I[52829,["42","static/chunks/42-014374badc35fe9b.js","261","static/chunks/261-92d8946249b3296e.js","250","static/chunks/250-e4cc2ceb9ff1c37a.js","699","static/chunks/699-f4066c747670f979.js","418","static/chunks/app/model_hub/page-3d2c374ee41b38e5.js"],"default",1]
|
||||
4:I[4707,[],""]
|
||||
5:I[36423,[],""]
|
||||
0:["fzhvjOFL6KeNsWYrLD4ya",[[["",{"children":["model_hub",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["model_hub",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","model_hub","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
|
||||
0:["sh3mKTgIKifNl8lsgZ675",[[["",{"children":["model_hub",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["model_hub",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","model_hub","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
|
||||
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
|
||||
1:null
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1,7 +1,7 @@
|
|||
2:I[19107,[],"ClientPageRoot"]
|
||||
3:I[12011,["665","static/chunks/3014691f-b7b79b78e27792f3.js","42","static/chunks/42-69f5b4e6a9942a9f.js","899","static/chunks/899-57685cedd1dcbc78.js","250","static/chunks/250-7d480872c0e251dc.js","461","static/chunks/app/onboarding/page-9598003bc1e91371.js"],"default",1]
|
||||
3:I[12011,["665","static/chunks/3014691f-b7b79b78e27792f3.js","42","static/chunks/42-014374badc35fe9b.js","899","static/chunks/899-54ea329f41297bf0.js","250","static/chunks/250-e4cc2ceb9ff1c37a.js","461","static/chunks/app/onboarding/page-4809c2f644098f19.js"],"default",1]
|
||||
4:I[4707,[],""]
|
||||
5:I[36423,[],""]
|
||||
0:["fzhvjOFL6KeNsWYrLD4ya",[[["",{"children":["onboarding",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["onboarding",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","onboarding","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
|
||||
0:["sh3mKTgIKifNl8lsgZ675",[[["",{"children":["onboarding",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["onboarding",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","onboarding","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
|
||||
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
|
||||
1:null
|
||||
|
|
|
@ -434,3 +434,188 @@ def delete_responses(
            completion_kwargs=local_vars,
            extra_kwargs=kwargs,
        )


@client
async def aget_responses(
    response_id: str,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Optional[Dict[str, Any]] = None,
    extra_query: Optional[Dict[str, Any]] = None,
    extra_body: Optional[Dict[str, Any]] = None,
    timeout: Optional[Union[float, httpx.Timeout]] = None,
    # LiteLLM specific params,
    custom_llm_provider: Optional[str] = None,
    **kwargs,
) -> ResponsesAPIResponse:
    """
    Async: Fetch a response by its ID.

    GET /v1/responses/{response_id} endpoint in the responses API

    Args:
        response_id: The ID of the response to fetch.
        custom_llm_provider: Optional provider name. If not specified, will be decoded from response_id.

    Returns:
        The response object with complete information about the stored response.
    """
    local_vars = locals()
    try:
        loop = asyncio.get_event_loop()
        kwargs["aget_responses"] = True

        # get custom llm provider from response_id
        decoded_response_id: DecodedResponseId = (
            ResponsesAPIRequestUtils._decode_responses_api_response_id(
                response_id=response_id,
            )
        )
        response_id = decoded_response_id.get("response_id") or response_id
        custom_llm_provider = (
            decoded_response_id.get("custom_llm_provider") or custom_llm_provider
        )

        func = partial(
            get_responses,
            response_id=response_id,
            custom_llm_provider=custom_llm_provider,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
            **kwargs,
        )

        ctx = contextvars.copy_context()
        func_with_context = partial(ctx.run, func)
        init_response = await loop.run_in_executor(None, func_with_context)

        if asyncio.iscoroutine(init_response):
            response = await init_response
        else:
            response = init_response

        # Update the responses_api_response_id with the model_id
        if isinstance(response, ResponsesAPIResponse):
            response = ResponsesAPIRequestUtils._update_responses_api_response_id_with_model_id(
                responses_api_response=response,
                litellm_metadata=kwargs.get("litellm_metadata", {}),
                custom_llm_provider=custom_llm_provider,
            )
        return response
    except Exception as e:
        raise litellm.exception_type(
            model=None,
            custom_llm_provider=custom_llm_provider,
            original_exception=e,
            completion_kwargs=local_vars,
            extra_kwargs=kwargs,
        )


@client
def get_responses(
    response_id: str,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Optional[Dict[str, Any]] = None,
    extra_query: Optional[Dict[str, Any]] = None,
    extra_body: Optional[Dict[str, Any]] = None,
    timeout: Optional[Union[float, httpx.Timeout]] = None,
    # LiteLLM specific params,
    custom_llm_provider: Optional[str] = None,
    **kwargs,
) -> Union[ResponsesAPIResponse, Coroutine[Any, Any, ResponsesAPIResponse]]:
    """
    Fetch a response by its ID.

    GET /v1/responses/{response_id} endpoint in the responses API

    Args:
        response_id: The ID of the response to fetch.
        custom_llm_provider: Optional provider name. If not specified, will be decoded from response_id.

    Returns:
        The response object with complete information about the stored response.
    """
    local_vars = locals()
    try:
        litellm_logging_obj: LiteLLMLoggingObj = kwargs.get("litellm_logging_obj")  # type: ignore
        litellm_call_id: Optional[str] = kwargs.get("litellm_call_id", None)
        _is_async = kwargs.pop("aget_responses", False) is True

        # get llm provider logic
        litellm_params = GenericLiteLLMParams(**kwargs)

        # get custom llm provider from response_id
        decoded_response_id: DecodedResponseId = (
            ResponsesAPIRequestUtils._decode_responses_api_response_id(
                response_id=response_id,
            )
        )
        response_id = decoded_response_id.get("response_id") or response_id
        custom_llm_provider = (
            decoded_response_id.get("custom_llm_provider") or custom_llm_provider
        )

        if custom_llm_provider is None:
            raise ValueError("custom_llm_provider is required but passed as None")

        # get provider config
        responses_api_provider_config: Optional[BaseResponsesAPIConfig] = (
            ProviderConfigManager.get_provider_responses_api_config(
                model=None,
                provider=litellm.LlmProviders(custom_llm_provider),
            )
        )

        if responses_api_provider_config is None:
            raise ValueError(
                f"GET responses is not supported for {custom_llm_provider}"
            )

        local_vars.update(kwargs)

        # Pre Call logging
        litellm_logging_obj.update_environment_variables(
            model=None,
            optional_params={
                "response_id": response_id,
            },
            litellm_params={
                "litellm_call_id": litellm_call_id,
            },
            custom_llm_provider=custom_llm_provider,
        )

        # Call the handler with _is_async flag instead of directly calling the async handler
        response = base_llm_http_handler.get_responses(
            response_id=response_id,
            custom_llm_provider=custom_llm_provider,
            responses_api_provider_config=responses_api_provider_config,
            litellm_params=litellm_params,
            logging_obj=litellm_logging_obj,
            extra_headers=extra_headers,
            extra_body=extra_body,
            timeout=timeout or request_timeout,
            _is_async=_is_async,
            client=kwargs.get("client"),
        )

        # Update the responses_api_response_id with the model_id
        if isinstance(response, ResponsesAPIResponse):
            response = ResponsesAPIRequestUtils._update_responses_api_response_id_with_model_id(
                responses_api_response=response,
                litellm_metadata=kwargs.get("litellm_metadata", {}),
                custom_llm_provider=custom_llm_provider,
            )

        return response
    except Exception as e:
        raise litellm.exception_type(
            model=None,
            custom_llm_provider=custom_llm_provider,
            original_exception=e,
            completion_kwargs=local_vars,
            extra_kwargs=kwargs,
        )
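The two helpers above are the user-facing additions in this hunk. A minimal usage sketch, assuming an OpenAI key is configured and a Responses-API-capable model (the model name below is an assumption, not taken from this diff):

import asyncio
import litellm

# Create a response, then fetch it back by its ID via the new sync helper.
response = litellm.responses(
    model="openai/gpt-4o",  # assumption: any model litellm routes to the Responses API
    input="Basic ping",
    max_output_tokens=20,
)
fetched = litellm.get_responses(response_id=response.id)
assert fetched.id == response.id

# The async variant mirrors the sync one.
async def refetch():
    return await litellm.aget_responses(response_id=response.id)

asyncio.run(refetch())

Per the docstrings above, the provider is decoded from the response_id itself, so custom_llm_provider typically only needs to be passed when the ID was not produced by litellm.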
@ -824,12 +824,12 @@ class OpenAIChatCompletionChunk(ChatCompletionChunk):

class Hyperparameters(BaseModel):
    batch_size: Optional[Union[str, int]] = None  # "Number of examples in each batch."
    learning_rate_multiplier: Optional[
        Union[str, float]
    ] = None  # Scaling factor for the learning rate
    n_epochs: Optional[
        Union[str, int]
    ] = None  # "The number of epochs to train the model for"
    learning_rate_multiplier: Optional[Union[str, float]] = (
        None  # Scaling factor for the learning rate
    )
    n_epochs: Optional[Union[str, int]] = (
        None  # "The number of epochs to train the model for"
    )


class FineTuningJobCreate(BaseModel):
@ -856,18 +856,18 @@ class FineTuningJobCreate(BaseModel):
    model: str  # "The name of the model to fine-tune."
    training_file: str  # "The ID of an uploaded file that contains training data."
    hyperparameters: Optional[
        Hyperparameters
    ] = None  # "The hyperparameters used for the fine-tuning job."
    suffix: Optional[
        str
    ] = None  # "A string of up to 18 characters that will be added to your fine-tuned model name."
    validation_file: Optional[
        str
    ] = None  # "The ID of an uploaded file that contains validation data."
    integrations: Optional[
        List[str]
    ] = None  # "A list of integrations to enable for your fine-tuning job."
    hyperparameters: Optional[Hyperparameters] = (
        None  # "The hyperparameters used for the fine-tuning job."
    )
    suffix: Optional[str] = (
        None  # "A string of up to 18 characters that will be added to your fine-tuned model name."
    )
    validation_file: Optional[str] = (
        None  # "The ID of an uploaded file that contains validation data."
    )
    integrations: Optional[List[str]] = (
        None  # "A list of integrations to enable for your fine-tuning job."
    )
    seed: Optional[int] = None  # "The seed controls the reproducibility of the job."
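For reference, a hypothetical instantiation of the reformatted request model (field names come from the definitions above; the file ID and suffix are placeholders):

job = FineTuningJobCreate(
    model="gpt-3.5-turbo",
    training_file="file-abc123",  # placeholder uploaded-file ID
    hyperparameters=Hyperparameters(n_epochs=3, batch_size="auto"),
    suffix="my-experiment",  # optional, up to 18 characters
)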
@ -1259,3 +1259,12 @@ class OpenAIRealtimeStreamResponseBaseObject(TypedDict):
OpenAIRealtimeStreamList = List[
    Union[OpenAIRealtimeStreamResponseBaseObject, OpenAIRealtimeStreamSessionEvents]
]


class ImageGenerationRequestQuality(str, Enum):
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    AUTO = "auto"
    STANDARD = "standard"
    HD = "hd"
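A sketch of how the new quality values might be passed through an image generation call; whether a given provider honours the quality parameter is an assumption here, but the enum members themselves come from the definition above:

from litellm import image_generation
# ImageGenerationRequestQuality is the enum defined in the hunk above.

img = image_generation(
    model="gpt-image-1",  # model name taken from the tests later in this diff
    prompt="a watercolor fox",
    quality=ImageGenerationRequestQuality.LOW.value,  # "low"
)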
@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "1.67.2"
version = "1.67.3"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT"

@ -124,7 +124,7 @@ requires = ["poetry-core", "wheel"]
build-backend = "poetry.core.masonry.api"

[tool.commitizen]
version = "1.67.2"
version = "1.67.3"
version_files = [
    "pyproject.toml:^version"
]
@ -66,8 +66,8 @@ class BaseImageGenTest(ABC):
            logged_standard_logging_payload = custom_logger.standard_logging_payload
            print("logged_standard_logging_payload", logged_standard_logging_payload)
            assert logged_standard_logging_payload is not None
            # assert logged_standard_logging_payload["response_cost"] is not None
            # assert logged_standard_logging_payload["response_cost"] > 0
            assert logged_standard_logging_payload["response_cost"] is not None
            assert logged_standard_logging_payload["response_cost"] > 0

            from openai.types.images_response import ImagesResponse

@ -85,4 +85,4 @@ class BaseImageGenTest(ABC):
            if "Your task failed as a result of our safety system." in str(e):
                pass
            else:
                pytest.fail(f"An exception occurred - {str(e)}")
                pytest.fail(f"An exception occurred - {str(e)}")
@ -161,6 +161,9 @@ class TestOpenAIDalle3(BaseImageGenTest):
    def get_base_image_generation_call_args(self) -> dict:
        return {"model": "dall-e-3"}


class TestOpenAIGPTImage1(BaseImageGenTest):
    def get_base_image_generation_call_args(self) -> dict:
        return {"model": "gpt-image-1"}


class TestAzureOpenAIDalle3(BaseImageGenTest):
    def get_base_image_generation_call_args(self) -> dict:
@ -78,3 +78,43 @@ def test_router_with_model_info_and_model_group():
        model_group="gpt-3.5-turbo",
        user_facing_model_group_name="gpt-3.5-turbo",
    )


@pytest.mark.asyncio
async def test_router_with_tags_and_fallbacks():
    """
    If fallback model missing tag, raise error
    """
    from litellm import Router

    router = Router(
        model_list=[
            {
                "model_name": "gpt-3.5-turbo",
                "litellm_params": {
                    "model": "gpt-3.5-turbo",
                    "mock_response": "Hello, world!",
                    "tags": ["test"],
                },
            },
            {
                "model_name": "anthropic-claude-3-5-sonnet",
                "litellm_params": {
                    "model": "claude-3-5-sonnet-latest",
                    "mock_response": "Hello, world 2!",
                },
            },
        ],
        fallbacks=[
            {"gpt-3.5-turbo": ["anthropic-claude-3-5-sonnet"]},
        ],
        enable_tag_filtering=True,
    )

    with pytest.raises(Exception):
        response = await router.acompletion(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hello, world!"}],
            mock_testing_fallbacks=True,
            metadata={"tags": ["test"]},
        )
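As a counterpart to the failing case above (an assumption, not part of this diff): if the fallback deployment also carries the matching tag, tag filtering should allow the same request to fall back instead of raising.

from litellm import Router

router_with_tagged_fallback = Router(
    model_list=[
        {
            "model_name": "gpt-3.5-turbo",
            "litellm_params": {
                "model": "gpt-3.5-turbo",
                "mock_response": "Hello, world!",
                "tags": ["test"],
            },
        },
        {
            "model_name": "anthropic-claude-3-5-sonnet",
            "litellm_params": {
                "model": "claude-3-5-sonnet-latest",
                "mock_response": "Hello, world 2!",
                "tags": ["test"],  # tag added so the tagged request may route here on fallback
            },
        },
    ],
    fallbacks=[{"gpt-3.5-turbo": ["anthropic-claude-3-5-sonnet"]}],
    enable_tag_filtering=True,
)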
@ -274,5 +274,44 @@ class BaseResponsesAPITest(ABC):
            **base_completion_call_args
        )

    @pytest.mark.parametrize("sync_mode", [False, True])
    @pytest.mark.asyncio
    async def test_basic_openai_responses_get_endpoint(self, sync_mode):
        litellm._turn_on_debug()
        litellm.set_verbose = True
        base_completion_call_args = self.get_base_completion_call_args()
        if sync_mode:
            response = litellm.responses(
                input="Basic ping", max_output_tokens=20,
                **base_completion_call_args
            )

            # get the response
            if isinstance(response, ResponsesAPIResponse):
                result = litellm.get_responses(
                    response_id=response.id,
                    **base_completion_call_args
                )
                assert result is not None
                assert result.id == response.id
                assert result.output == response.output
            else:
                raise ValueError("response is not a ResponsesAPIResponse")
        else:
            response = await litellm.aresponses(
                input="Basic ping", max_output_tokens=20,
                **base_completion_call_args
            )
            # async get the response
            if isinstance(response, ResponsesAPIResponse):
                result = await litellm.aget_responses(
                    response_id=response.id,
                    **base_completion_call_args
                )
                assert result is not None
                assert result.id == response.id
                assert result.output == response.output
            else:
                raise ValueError("response is not a ResponsesAPIResponse")
@ -36,6 +36,11 @@ class TestAnthropicResponsesAPITest(BaseResponsesAPITest):
    async def test_basic_openai_responses_streaming_delete_endpoint(self, sync_mode=False):
        pass

    async def test_basic_openai_responses_get_endpoint(self, sync_mode=False):
        pass


def test_multiturn_tool_calls():
    # Test streaming response with tools for Anthropic
@ -811,6 +811,7 @@ class BaseLLMChatTest(ABC):

        return url

    @pytest.mark.flaky(retries=3, delay=1)
    def test_empty_tools(self):
        """
        Related Issue: https://github.com/BerriAI/litellm/issues/9080

@ -833,6 +834,8 @@ class BaseLLMChatTest(ABC):
            response = completion(**base_completion_call_args, messages=[{"role": "user", "content": "Hello, how are you?"}], tools=[])  # just make sure call doesn't fail
            print("response: ", response)
            assert response is not None
        except litellm.ContentPolicyViolationError:
            pass
        except litellm.InternalServerError:
            pytest.skip("Model is overloaded")
        except litellm.RateLimitError:

@ -840,6 +843,7 @@ class BaseLLMChatTest(ABC):
        except Exception as e:
            pytest.fail(f"Error occurred: {e}")

    @pytest.mark.flaky(retries=3, delay=1)
    def test_basic_tool_calling(self):
        try:
            from litellm import completion, ModelResponse

@ -943,6 +947,8 @@ class BaseLLMChatTest(ABC):
                second_response.choices[0].message.content is not None
                or second_response.choices[0].message.tool_calls is not None
            )
        except litellm.ServiceUnavailableError:
            pytest.skip("Model is overloaded")
        except litellm.InternalServerError:
            pytest.skip("Model is overloaded")
        except litellm.RateLimitError:
@ -2972,6 +2972,30 @@ def test_bedrock_application_inference_profile():
    client = HTTPHandler()
    client2 = HTTPHandler()

    tools = [{
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                    },
                },
                "required": ["location"],
            }
        }
    }
    ]

    with patch.object(client, "post") as mock_post, patch.object(
        client2, "post"
    ) as mock_post2:

@ -2981,6 +3005,7 @@ def test_bedrock_application_inference_profile():
            messages=[{"role": "user", "content": "Hello, how are you?"}],
            model_id="arn:aws:bedrock:eu-central-1:000000000000:application-inference-profile/a0a0a0a0a0a0",
            client=client,
            tools=tools
        )
    except Exception as e:
        print(e)

@ -2990,6 +3015,7 @@ def test_bedrock_application_inference_profile():
            model="bedrock/converse/arn:aws:bedrock:eu-central-1:000000000000:application-inference-profile/a0a0a0a0a0a0",
            messages=[{"role": "user", "content": "Hello, how are you?"}],
            client=client2,
            tools=tools
        )
    except Exception as e:
        print(e)
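Outside the mocked test, the user-facing call shape being exercised looks roughly like this (assumes valid AWS credentials and a real application inference profile ARN; the tool schema is trimmed from the test above):

from litellm import completion

tools = [{
    "type": "function",
    "function": {
        "name": "get_current_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {"location": {"type": "string"}},
            "required": ["location"],
        },
    },
}]

completion(
    model="bedrock/converse/arn:aws:bedrock:eu-central-1:000000000000:application-inference-profile/a0a0a0a0a0a0",
    messages=[{"role": "user", "content": "Hello, how are you?"}],
    tools=tools,
)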
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -1 +1 @@
|
|||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[185],{35677:function(n,e,t){Promise.resolve().then(t.t.bind(t,39974,23)),Promise.resolve().then(t.t.bind(t,2778,23))},2778:function(){},39974:function(n){n.exports={style:{fontFamily:"'__Inter_cf7686', '__Inter_Fallback_cf7686'",fontStyle:"normal"},className:"__className_cf7686"}}},function(n){n.O(0,[919,986,971,117,744],function(){return n(n.s=35677)}),_N_E=n.O()}]);
|
||||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[185],{96443:function(n,e,t){Promise.resolve().then(t.t.bind(t,39974,23)),Promise.resolve().then(t.t.bind(t,2778,23))},2778:function(){},39974:function(n){n.exports={style:{fontFamily:"'__Inter_cf7686', '__Inter_Fallback_cf7686'",fontStyle:"normal"},className:"__className_cf7686"}}},function(n){n.O(0,[919,986,971,117,744],function(){return n(n.s=96443)}),_N_E=n.O()}]);
|
|
@ -1 +1 @@
|
|||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[418],{56668:function(e,n,u){Promise.resolve().then(u.bind(u,52829))},52829:function(e,n,u){"use strict";u.r(n),u.d(n,{default:function(){return f}});var t=u(57437),s=u(2265),r=u(99376),c=u(92699);function f(){let e=(0,r.useSearchParams)().get("key"),[n,u]=(0,s.useState)(null);return(0,s.useEffect)(()=>{e&&u(e)},[e]),(0,t.jsx)(c.Z,{accessToken:n,publicPage:!0,premiumUser:!1})}}},function(e){e.O(0,[42,261,250,699,971,117,744],function(){return e(e.s=56668)}),_N_E=e.O()}]);
|
||||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[418],{21024:function(e,n,u){Promise.resolve().then(u.bind(u,52829))},52829:function(e,n,u){"use strict";u.r(n),u.d(n,{default:function(){return f}});var t=u(57437),s=u(2265),r=u(99376),c=u(92699);function f(){let e=(0,r.useSearchParams)().get("key"),[n,u]=(0,s.useState)(null);return(0,s.useEffect)(()=>{e&&u(e)},[e]),(0,t.jsx)(c.Z,{accessToken:n,publicPage:!0,premiumUser:!1})}}},function(e){e.O(0,[42,261,250,699,971,117,744],function(){return e(e.s=21024)}),_N_E=e.O()}]);
|
|
@ -0,0 +1 @@
|
|||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[461],{8672:function(e,t,n){Promise.resolve().then(n.bind(n,12011))},12011:function(e,t,n){"use strict";n.r(t),n.d(t,{default:function(){return S}});var s=n(57437),o=n(2265),a=n(99376),c=n(20831),i=n(94789),l=n(12514),r=n(49804),u=n(67101),m=n(84264),d=n(49566),h=n(96761),x=n(84566),p=n(19250),f=n(14474),k=n(13634),g=n(73002),j=n(3914);function S(){let[e]=k.Z.useForm(),t=(0,a.useSearchParams)();(0,j.e)("token");let n=t.get("invitation_id"),[S,w]=(0,o.useState)(null),[Z,_]=(0,o.useState)(""),[N,b]=(0,o.useState)(""),[T,y]=(0,o.useState)(null),[E,v]=(0,o.useState)(""),[C,U]=(0,o.useState)("");return(0,o.useEffect)(()=>{n&&(0,p.W_)(n).then(e=>{let t=e.login_url;console.log("login_url:",t),v(t);let n=e.token,s=(0,f.o)(n);U(n),console.log("decoded:",s),w(s.key),console.log("decoded user email:",s.user_email),b(s.user_email),y(s.user_id)})},[n]),(0,s.jsx)("div",{className:"mx-auto w-full max-w-md mt-10",children:(0,s.jsxs)(l.Z,{children:[(0,s.jsx)(h.Z,{className:"text-sm mb-5 text-center",children:"\uD83D\uDE85 LiteLLM"}),(0,s.jsx)(h.Z,{className:"text-xl",children:"Sign up"}),(0,s.jsx)(m.Z,{children:"Claim your user account to login to Admin UI."}),(0,s.jsx)(i.Z,{className:"mt-4",title:"SSO",icon:x.GH$,color:"sky",children:(0,s.jsxs)(u.Z,{numItems:2,className:"flex justify-between items-center",children:[(0,s.jsx)(r.Z,{children:"SSO is under the Enterprise Tier."}),(0,s.jsx)(r.Z,{children:(0,s.jsx)(c.Z,{variant:"primary",className:"mb-2",children:(0,s.jsx)("a",{href:"https://forms.gle/W3U4PZpJGFHWtHyA9",target:"_blank",children:"Get Free Trial"})})})]})}),(0,s.jsxs)(k.Z,{className:"mt-10 mb-5 mx-auto",layout:"vertical",onFinish:e=>{console.log("in handle submit. accessToken:",S,"token:",C,"formValues:",e),S&&C&&(e.user_email=N,T&&n&&(0,p.m_)(S,n,T,e.password).then(e=>{let t="/ui/";t+="?login=success",document.cookie="token="+C,console.log("redirecting to:",t),window.location.href=t}))},children:[(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(k.Z.Item,{label:"Email Address",name:"user_email",children:(0,s.jsx)(d.Z,{type:"email",disabled:!0,value:N,defaultValue:N,className:"max-w-md"})}),(0,s.jsx)(k.Z.Item,{label:"Password",name:"password",rules:[{required:!0,message:"password required to sign up"}],help:"Create a password for your account",children:(0,s.jsx)(d.Z,{placeholder:"",type:"password",className:"max-w-md"})})]}),(0,s.jsx)("div",{className:"mt-10",children:(0,s.jsx)(g.ZP,{htmlType:"submit",children:"Sign Up"})})]})]})})}},3914:function(e,t,n){"use strict";function s(){let e=window.location.hostname,t=["Lax","Strict","None"];["/","/ui"].forEach(n=>{document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,";"),document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,"; domain=").concat(e,";"),t.forEach(t=>{let s="None"===t?" Secure;":"";document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,"; SameSite=").concat(t,";").concat(s),document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,"; domain=").concat(e,"; SameSite=").concat(t,";").concat(s)})}),console.log("After clearing cookies:",document.cookie)}function o(e){let t=document.cookie.split("; ").find(t=>t.startsWith(e+"="));return t?t.split("=")[1]:null}n.d(t,{b:function(){return s},e:function(){return o}})}},function(e){e.O(0,[665,42,899,250,971,117,744],function(){return e(e.s=8672)}),_N_E=e.O()}]);
|
|
@ -1 +0,0 @@
|
|||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[461],{23781:function(e,t,n){Promise.resolve().then(n.bind(n,12011))},12011:function(e,t,n){"use strict";n.r(t),n.d(t,{default:function(){return S}});var s=n(57437),o=n(2265),a=n(99376),c=n(20831),i=n(94789),l=n(12514),r=n(49804),u=n(67101),m=n(84264),d=n(49566),h=n(96761),x=n(84566),p=n(19250),f=n(14474),k=n(13634),g=n(73002),j=n(3914);function S(){let[e]=k.Z.useForm(),t=(0,a.useSearchParams)();(0,j.e)("token");let n=t.get("invitation_id"),[S,w]=(0,o.useState)(null),[Z,_]=(0,o.useState)(""),[N,b]=(0,o.useState)(""),[T,y]=(0,o.useState)(null),[E,v]=(0,o.useState)(""),[C,U]=(0,o.useState)("");return(0,o.useEffect)(()=>{n&&(0,p.W_)(n).then(e=>{let t=e.login_url;console.log("login_url:",t),v(t);let n=e.token,s=(0,f.o)(n);U(n),console.log("decoded:",s),w(s.key),console.log("decoded user email:",s.user_email),b(s.user_email),y(s.user_id)})},[n]),(0,s.jsx)("div",{className:"mx-auto w-full max-w-md mt-10",children:(0,s.jsxs)(l.Z,{children:[(0,s.jsx)(h.Z,{className:"text-sm mb-5 text-center",children:"\uD83D\uDE85 LiteLLM"}),(0,s.jsx)(h.Z,{className:"text-xl",children:"Sign up"}),(0,s.jsx)(m.Z,{children:"Claim your user account to login to Admin UI."}),(0,s.jsx)(i.Z,{className:"mt-4",title:"SSO",icon:x.GH$,color:"sky",children:(0,s.jsxs)(u.Z,{numItems:2,className:"flex justify-between items-center",children:[(0,s.jsx)(r.Z,{children:"SSO is under the Enterprise Tier."}),(0,s.jsx)(r.Z,{children:(0,s.jsx)(c.Z,{variant:"primary",className:"mb-2",children:(0,s.jsx)("a",{href:"https://forms.gle/W3U4PZpJGFHWtHyA9",target:"_blank",children:"Get Free Trial"})})})]})}),(0,s.jsxs)(k.Z,{className:"mt-10 mb-5 mx-auto",layout:"vertical",onFinish:e=>{console.log("in handle submit. accessToken:",S,"token:",C,"formValues:",e),S&&C&&(e.user_email=N,T&&n&&(0,p.m_)(S,n,T,e.password).then(e=>{let t="/ui/";t+="?login=success",document.cookie="token="+C,console.log("redirecting to:",t),window.location.href=t}))},children:[(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(k.Z.Item,{label:"Email Address",name:"user_email",children:(0,s.jsx)(d.Z,{type:"email",disabled:!0,value:N,defaultValue:N,className:"max-w-md"})}),(0,s.jsx)(k.Z.Item,{label:"Password",name:"password",rules:[{required:!0,message:"password required to sign up"}],help:"Create a password for your account",children:(0,s.jsx)(d.Z,{placeholder:"",type:"password",className:"max-w-md"})})]}),(0,s.jsx)("div",{className:"mt-10",children:(0,s.jsx)(g.ZP,{htmlType:"submit",children:"Sign Up"})})]})]})})}},3914:function(e,t,n){"use strict";function s(){let e=window.location.hostname,t=["Lax","Strict","None"];["/","/ui"].forEach(n=>{document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,";"),document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,"; domain=").concat(e,";"),t.forEach(t=>{let s="None"===t?" Secure;":"";document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,"; SameSite=").concat(t,";").concat(s),document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=".concat(n,"; domain=").concat(e,"; SameSite=").concat(t,";").concat(s)})}),console.log("After clearing cookies:",document.cookie)}function o(e){let t=document.cookie.split("; ").find(t=>t.startsWith(e+"="));return t?t.split("=")[1]:null}n.d(t,{b:function(){return s},e:function(){return o}})}},function(e){e.O(0,[665,42,899,250,971,117,744],function(){return e(e.s=23781)}),_N_E=e.O()}]);
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -1 +1 @@
|
|||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[744],{35618:function(e,n,t){Promise.resolve().then(t.t.bind(t,12846,23)),Promise.resolve().then(t.t.bind(t,19107,23)),Promise.resolve().then(t.t.bind(t,61060,23)),Promise.resolve().then(t.t.bind(t,4707,23)),Promise.resolve().then(t.t.bind(t,80,23)),Promise.resolve().then(t.t.bind(t,36423,23))}},function(e){var n=function(n){return e(e.s=n)};e.O(0,[971,117],function(){return n(54278),n(35618)}),_N_E=e.O()}]);
|
||||
(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[744],{10264:function(e,n,t){Promise.resolve().then(t.t.bind(t,12846,23)),Promise.resolve().then(t.t.bind(t,19107,23)),Promise.resolve().then(t.t.bind(t,61060,23)),Promise.resolve().then(t.t.bind(t,4707,23)),Promise.resolve().then(t.t.bind(t,80,23)),Promise.resolve().then(t.t.bind(t,36423,23))}},function(e){var n=function(n){return e(e.s=n)};e.O(0,[971,117],function(){return n(54278),n(10264)}),_N_E=e.O()}]);
|
|
@ -1 +1 @@
|
|||
<!DOCTYPE html><html id="__next_error__"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" as="script" fetchPriority="low" href="/ui/_next/static/chunks/webpack-75a5453f51d60261.js"/><script src="/ui/_next/static/chunks/fd9d1056-205af899b895cbac.js" async=""></script><script src="/ui/_next/static/chunks/117-1c5bfc45bfc4237d.js" async=""></script><script src="/ui/_next/static/chunks/main-app-2b16cdb7ff4e1af7.js" async=""></script><title>LiteLLM Dashboard</title><meta name="description" content="LiteLLM Proxy Admin UI"/><link rel="icon" href="/ui/favicon.ico" type="image/x-icon" sizes="16x16"/><meta name="next-size-adjust"/><script src="/ui/_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body><script src="/ui/_next/static/chunks/webpack-75a5453f51d60261.js" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0]);self.__next_f.push([2,null])</script><script>self.__next_f.push([1,"1:HL[\"/ui/_next/static/media/a34f9d1faa5f3315-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n2:HL[\"/ui/_next/static/css/86f6cc749f6b8493.css\",\"style\"]\n3:HL[\"/ui/_next/static/css/005c96178151b9fd.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"4:I[12846,[],\"\"]\n6:I[19107,[],\"ClientPageRoot\"]\n7:I[76737,[\"665\",\"static/chunks/3014691f-b7b79b78e27792f3.js\",\"990\",\"static/chunks/13b76428-ebdf3012af0e4489.js\",\"42\",\"static/chunks/42-69f5b4e6a9942a9f.js\",\"261\",\"static/chunks/261-ee7f0f1f1c8c22a0.js\",\"899\",\"static/chunks/899-57685cedd1dcbc78.js\",\"466\",\"static/chunks/466-65538e7f331af98e.js\",\"250\",\"static/chunks/250-7d480872c0e251dc.js\",\"699\",\"static/chunks/699-2176ba2273e4676d.js\",\"931\",\"static/chunks/app/page-36914b80c40b5032.js\"],\"default\",1]\n8:I[4707,[],\"\"]\n9:I[36423,[],\"\"]\nb:I[61060,[],\"\"]\nc:[]\n"])</script><script>self.__next_f.push([1,"0:[\"$\",\"$L4\",null,{\"buildId\":\"fzhvjOFL6KeNsWYrLD4ya\",\"assetPrefix\":\"/ui\",\"urlParts\":[\"\",\"\"],\"initialTree\":[\"\",{\"children\":[\"__PAGE__\",{}]},\"$undefined\",\"$undefined\",true],\"initialSeedData\":[\"\",{\"children\":[\"__PAGE__\",{},[[\"$L5\",[\"$\",\"$L6\",null,{\"props\":{\"params\":{},\"searchParams\":{}},\"Component\":\"$7\"}],null],null],null]},[[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/ui/_next/static/css/86f6cc749f6b8493.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\"}],[\"$\",\"link\",\"1\",{\"rel\":\"stylesheet\",\"href\":\"/ui/_next/static/css/005c96178151b9fd.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__className_cf7686\",\"children\":[\"$\",\"$L8\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\"],\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L9\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI 
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],\"notFoundStyles\":[]}]}]}]],null],null],\"couldBeIntercepted\":false,\"initialHead\":[null,\"$La\"],\"globalErrorComponent\":\"$b\",\"missingSlots\":\"$Wc\"}]\n"])</script><script>self.__next_f.push([1,"a:[[\"$\",\"meta\",\"0\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}],[\"$\",\"meta\",\"1\",{\"charSet\":\"utf-8\"}],[\"$\",\"title\",\"2\",{\"children\":\"LiteLLM Dashboard\"}],[\"$\",\"meta\",\"3\",{\"name\":\"description\",\"content\":\"LiteLLM Proxy Admin UI\"}],[\"$\",\"link\",\"4\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"meta\",\"5\",{\"name\":\"next-size-adjust\"}]]\n5:null\n"])</script></body></html>
<!DOCTYPE html><html id="__next_error__"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" as="script" fetchPriority="low" href="/ui/_next/static/chunks/webpack-75a5453f51d60261.js"/><script src="/ui/_next/static/chunks/fd9d1056-205af899b895cbac.js" async=""></script><script src="/ui/_next/static/chunks/117-1c5bfc45bfc4237d.js" async=""></script><script src="/ui/_next/static/chunks/main-app-4f7318ae681a6d94.js" async=""></script><title>LiteLLM Dashboard</title><meta name="description" content="LiteLLM Proxy Admin UI"/><link rel="icon" href="/ui/favicon.ico" type="image/x-icon" sizes="16x16"/><meta name="next-size-adjust"/><script src="/ui/_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body><script src="/ui/_next/static/chunks/webpack-75a5453f51d60261.js" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0]);self.__next_f.push([2,null])</script><script>self.__next_f.push([1,"1:HL[\"/ui/_next/static/media/a34f9d1faa5f3315-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n2:HL[\"/ui/_next/static/css/86f6cc749f6b8493.css\",\"style\"]\n3:HL[\"/ui/_next/static/css/005c96178151b9fd.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"4:I[12846,[],\"\"]\n6:I[19107,[],\"ClientPageRoot\"]\n7:I[69451,[\"665\",\"static/chunks/3014691f-b7b79b78e27792f3.js\",\"990\",\"static/chunks/13b76428-ebdf3012af0e4489.js\",\"42\",\"static/chunks/42-014374badc35fe9b.js\",\"261\",\"static/chunks/261-92d8946249b3296e.js\",\"899\",\"static/chunks/899-54ea329f41297bf0.js\",\"978\",\"static/chunks/978-3e0bd2034b623309.js\",\"250\",\"static/chunks/250-e4cc2ceb9ff1c37a.js\",\"699\",\"static/chunks/699-f4066c747670f979.js\",\"931\",\"static/chunks/app/page-9bd76bfe1ce0a80a.js\"],\"default\",1]\n8:I[4707,[],\"\"]\n9:I[36423,[],\"\"]\nb:I[61060,[],\"\"]\nc:[]\n"])</script><script>self.__next_f.push([1,"0:[\"$\",\"$L4\",null,{\"buildId\":\"sh3mKTgIKifNl8lsgZ675\",\"assetPrefix\":\"/ui\",\"urlParts\":[\"\",\"\"],\"initialTree\":[\"\",{\"children\":[\"__PAGE__\",{}]},\"$undefined\",\"$undefined\",true],\"initialSeedData\":[\"\",{\"children\":[\"__PAGE__\",{},[[\"$L5\",[\"$\",\"$L6\",null,{\"props\":{\"params\":{},\"searchParams\":{}},\"Component\":\"$7\"}],null],null],null]},[[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/ui/_next/static/css/86f6cc749f6b8493.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\"}],[\"$\",\"link\",\"1\",{\"rel\":\"stylesheet\",\"href\":\"/ui/_next/static/css/005c96178151b9fd.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__className_cf7686\",\"children\":[\"$\",\"$L8\",null,{\"parallelRouterKey\":\"children\",\"segmentPath\":[\"children\"],\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L9\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI 
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":\"404\"}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],\"notFoundStyles\":[]}]}]}]],null],null],\"couldBeIntercepted\":false,\"initialHead\":[null,\"$La\"],\"globalErrorComponent\":\"$b\",\"missingSlots\":\"$Wc\"}]\n"])</script><script>self.__next_f.push([1,"a:[[\"$\",\"meta\",\"0\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}],[\"$\",\"meta\",\"1\",{\"charSet\":\"utf-8\"}],[\"$\",\"title\",\"2\",{\"children\":\"LiteLLM Dashboard\"}],[\"$\",\"meta\",\"3\",{\"name\":\"description\",\"content\":\"LiteLLM Proxy Admin UI\"}],[\"$\",\"link\",\"4\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"meta\",\"5\",{\"name\":\"next-size-adjust\"}]]\n5:null\n"])</script></body></html>
@@ -1,7 +1,7 @@
2:I[19107,[],"ClientPageRoot"]
3:I[76737,["665","static/chunks/3014691f-b7b79b78e27792f3.js","990","static/chunks/13b76428-ebdf3012af0e4489.js","42","static/chunks/42-69f5b4e6a9942a9f.js","261","static/chunks/261-ee7f0f1f1c8c22a0.js","899","static/chunks/899-57685cedd1dcbc78.js","466","static/chunks/466-65538e7f331af98e.js","250","static/chunks/250-7d480872c0e251dc.js","699","static/chunks/699-2176ba2273e4676d.js","931","static/chunks/app/page-36914b80c40b5032.js"],"default",1]
3:I[69451,["665","static/chunks/3014691f-b7b79b78e27792f3.js","990","static/chunks/13b76428-ebdf3012af0e4489.js","42","static/chunks/42-014374badc35fe9b.js","261","static/chunks/261-92d8946249b3296e.js","899","static/chunks/899-54ea329f41297bf0.js","978","static/chunks/978-3e0bd2034b623309.js","250","static/chunks/250-e4cc2ceb9ff1c37a.js","699","static/chunks/699-f4066c747670f979.js","931","static/chunks/app/page-9bd76bfe1ce0a80a.js"],"default",1]
4:I[4707,[],""]
5:I[36423,[],""]
0:["fzhvjOFL6KeNsWYrLD4ya",[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
0:["sh3mKTgIKifNl8lsgZ675",[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
1:null
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
2:I[19107,[],"ClientPageRoot"]
3:I[52829,["42","static/chunks/42-69f5b4e6a9942a9f.js","261","static/chunks/261-ee7f0f1f1c8c22a0.js","250","static/chunks/250-7d480872c0e251dc.js","699","static/chunks/699-2176ba2273e4676d.js","418","static/chunks/app/model_hub/page-a965e43ba9638156.js"],"default",1]
3:I[52829,["42","static/chunks/42-014374badc35fe9b.js","261","static/chunks/261-92d8946249b3296e.js","250","static/chunks/250-e4cc2ceb9ff1c37a.js","699","static/chunks/699-f4066c747670f979.js","418","static/chunks/app/model_hub/page-3d2c374ee41b38e5.js"],"default",1]
4:I[4707,[],""]
5:I[36423,[],""]
0:["fzhvjOFL6KeNsWYrLD4ya",[[["",{"children":["model_hub",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["model_hub",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","model_hub","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
0:["sh3mKTgIKifNl8lsgZ675",[[["",{"children":["model_hub",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["model_hub",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","model_hub","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
1:null
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
2:I[19107,[],"ClientPageRoot"]
3:I[12011,["665","static/chunks/3014691f-b7b79b78e27792f3.js","42","static/chunks/42-69f5b4e6a9942a9f.js","899","static/chunks/899-57685cedd1dcbc78.js","250","static/chunks/250-7d480872c0e251dc.js","461","static/chunks/app/onboarding/page-9598003bc1e91371.js"],"default",1]
3:I[12011,["665","static/chunks/3014691f-b7b79b78e27792f3.js","42","static/chunks/42-014374badc35fe9b.js","899","static/chunks/899-54ea329f41297bf0.js","250","static/chunks/250-e4cc2ceb9ff1c37a.js","461","static/chunks/app/onboarding/page-4809c2f644098f19.js"],"default",1]
4:I[4707,[],""]
5:I[36423,[],""]
0:["fzhvjOFL6KeNsWYrLD4ya",[[["",{"children":["onboarding",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["onboarding",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","onboarding","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
0:["sh3mKTgIKifNl8lsgZ675",[[["",{"children":["onboarding",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["onboarding",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}],null],null],null]},[null,["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","onboarding","children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined"}]],null]},[[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/86f6cc749f6b8493.css","precedence":"next","crossOrigin":"$undefined"}],["$","link","1",{"rel":"stylesheet","href":"/ui/_next/static/css/005c96178151b9fd.css","precedence":"next","crossOrigin":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_cf7686","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[]}]}]}]],null],null],["$L6",null]]]]
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
1:null
@@ -139,6 +139,8 @@ export default function CreateKeyPage() {

const [accessToken, setAccessToken] = useState<string | null>(null);

const redirectToLogin = authLoading === false && token === null && invitation_id === null;

useEffect(() => {
const token = getCookie("token");
setToken(token);
@@ -146,10 +148,10 @@ export default function CreateKeyPage() {
}, []);

useEffect(() => {
if (authLoading === false && token === null) {
if (redirectToLogin) {
window.location.href = (proxyBaseUrl || "") + "/sso/key/generate"
}
}, [token, authLoading])
}, [redirectToLogin])

useEffect(() => {
if (!token) {
@@ -221,7 +223,7 @@ export default function CreateKeyPage() {
}
}, [accessToken, userID, userRole]);

if (authLoading || (authLoading == false && token === null)) {
if (authLoading || redirectToLogin) {
return <LoadingScreen />
}
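The hunk above folds the inline auth check into a single derived `redirectToLogin` flag that both the redirect effect and the loading-screen guard reuse. A minimal standalone sketch of the same pattern, using the `authLoading`, `token`, `invitation_id` and `proxyBaseUrl` names from the diff; the `useLoginRedirect` wrapper is a hypothetical name, not code from this change:

```typescript
// Sketch only: derive one flag and drive the redirect from it.
import { useEffect } from "react";

export function useLoginRedirect(
  authLoading: boolean | null,
  token: string | null,
  invitationId: string | null,
  proxyBaseUrl: string | null,
) {
  // Single source of truth for "should we bounce to SSO login?"
  const redirectToLogin =
    authLoading === false && token === null && invitationId === null;

  useEffect(() => {
    if (redirectToLogin) {
      window.location.href = (proxyBaseUrl || "") + "/sso/key/generate";
    }
  }, [redirectToLogin, proxyBaseUrl]);

  return redirectToLogin;
}
```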
@@ -14,6 +14,7 @@ const ConditionalPublicModelName: React.FC = () => {
const customModelName = Form.useWatch('custom_model_name', form);
const showPublicModelName = !selectedModels.includes('all-wildcard');


// Force table to re-render when custom model name changes
useEffect(() => {
if (customModelName && selectedModels.includes('custom')) {
@@ -35,20 +36,33 @@ const ConditionalPublicModelName: React.FC = () => {
// Initial setup of model mappings when models are selected
useEffect(() => {
if (selectedModels.length > 0 && !selectedModels.includes('all-wildcard')) {
const mappings = selectedModels.map((model: string) => {
if (model === 'custom' && customModelName) {
// Check if we already have mappings that match the selected models
const currentMappings = form.getFieldValue('model_mappings') || [];

// Only update if the mappings don't exist or don't match the selected models
const shouldUpdateMappings = currentMappings.length !== selectedModels.length ||
!selectedModels.every(model =>
currentMappings.some((mapping: { public_name: string; litellm_model: string }) =>
mapping.public_name === model ||
(model === 'custom' && mapping.public_name === customModelName)));

if (shouldUpdateMappings) {
const mappings = selectedModels.map((model: string) => {
if (model === 'custom' && customModelName) {
return {
public_name: customModelName,
litellm_model: customModelName
};
}
return {
public_name: customModelName,
litellm_model: customModelName
public_name: model,
litellm_model: model
};
}
return {
public_name: model,
litellm_model: model
};
});
form.setFieldValue('model_mappings', mappings);
setTableKey(prev => prev + 1); // Force table re-render
});

form.setFieldValue('model_mappings', mappings);
setTableKey(prev => prev + 1); // Force table re-render
}
}
}, [selectedModels, customModelName, form]);
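The guard above only rewrites `model_mappings` when the current selection no longer matches what the form already holds, which avoids redundant form writes and table re-renders. A hedged, standalone restatement of that check; the `ModelMapping` type and `shouldUpdateMappings` export are illustrative names, not part of the change:

```typescript
// Sketch of the "only update if mappings drifted" check from the hunk above.
interface ModelMapping {
  public_name: string;
  litellm_model: string;
}

export function shouldUpdateMappings(
  currentMappings: ModelMapping[],
  selectedModels: string[],
  customModelName?: string,
): boolean {
  // Update when the counts differ, or when some selected model has no
  // matching mapping (treating 'custom' as matching the custom name).
  return (
    currentMappings.length !== selectedModels.length ||
    !selectedModels.every((model) =>
      currentMappings.some(
        (mapping) =>
          mapping.public_name === model ||
          (model === "custom" && mapping.public_name === customModelName),
      ),
    )
  );
}
```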
@@ -23,22 +23,34 @@ const LiteLLMModelNameField: React.FC<LiteLLMModelNameFieldProps> = ({

// If "all-wildcard" is selected, clear the model_name field
if (values.includes("all-wildcard")) {
form.setFieldsValue({ model_name: undefined, model_mappings: [] });
form.setFieldsValue({ model: undefined, model_mappings: [] });
} else {
// Update model mappings immediately for each selected model
const mappings = values
.map(model => ({
// Get current model value to check if we need to update
const currentModel = form.getFieldValue('model');

// Only update if the value has actually changed
if (JSON.stringify(currentModel) !== JSON.stringify(values)) {

// Create mappings first
const mappings = values.map(model => ({
public_name: model,
litellm_model: model
}));
form.setFieldsValue({ model_mappings: mappings });

// Update both fields in one call to reduce re-renders
form.setFieldsValue({
model: values,
model_mappings: mappings
});

}
}
};

// Handle custom model name changes
const handleCustomModelNameChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const customName = e.target.value;

// Immediately update the model mappings
const currentMappings = form.getFieldValue('model_mappings') || [];
const updatedMappings = currentMappings.map((mapping: any) => {
@@ -69,7 +81,11 @@ const LiteLLMModelNameField: React.FC<LiteLLMModelNameFieldProps> = ({
{(selectedProvider === Providers.Azure) ||
(selectedProvider === Providers.OpenAI_Compatible) ||
(selectedProvider === Providers.Ollama) ? (
<TextInput placeholder={getPlaceholder(selectedProvider)} />
<>
<TextInput
placeholder={getPlaceholder(selectedProvider)}
/>
</>
) : providerModels.length > 0 ? (
<AntSelect
mode="multiple"
@@ -117,13 +117,14 @@ const Createuser: React.FC<CreateuserProps> = ({
form.resetFields();
};

const handleCreate = async (formValues: { user_id: string, models?: string[] }) => {
const handleCreate = async (formValues: { user_id: string, models?: string[], user_role: string }) => {
try {
message.info("Making API Call");
if (!isEmbedded) {
setIsModalVisible(true);
}
if (!formValues.models || formValues.models.length === 0) {
if ((!formValues.models || formValues.models.length === 0) && formValues.user_role !== "proxy_admin") {
console.log("formValues.user_role", formValues.user_role)
// If models is empty or undefined, set it to "no-default-models"
formValues.models = ["no-default-models"];
}
@@ -154,9 +154,10 @@ export function useFilterLogic({
'Key Alias': ''
});

// Reset team and org selections
// Reset selections
setSelectedTeam(null);
setCurrentOrg(null);
setSelectedKeyAlias(null);
};

return {
@@ -15,7 +15,7 @@ import {
} from "@tremor/react";
import NumericalInput from "./shared/numerical_input";
import { ArrowLeftIcon, TrashIcon, KeyIcon } from "@heroicons/react/outline";
import { modelDeleteCall, modelUpdateCall, CredentialItem, credentialGetCall, credentialCreateCall, modelInfoCall, modelInfoV1Call } from "./networking";
import { modelDeleteCall, modelUpdateCall, CredentialItem, credentialGetCall, credentialCreateCall, modelInfoCall, modelInfoV1Call, modelPatchUpdateCall } from "./networking";
import { Button, Form, Input, InputNumber, message, Select, Modal } from "antd";
import EditModelModal from "./edit_model/edit_model_modal";
import { handleEditModelSubmit } from "./edit_model/edit_model_modal";
@@ -118,6 +118,8 @@ export default function ModelInfoView({
try {
if (!accessToken) return;
setIsSaving(true);

console.log("values.model_name, ", values.model_name);

let updatedLitellmParams = {
...localModelData.litellm_params,
@@ -149,7 +151,7 @@ export default function ModelInfoView({
}
};

await modelUpdateCall(accessToken, updateData);
await modelPatchUpdateCall(accessToken, updateData, modelId);

const updatedModelData = {
...localModelData,
@@ -2493,6 +2493,9 @@ export const keyInfoCall = async (accessToken: String, keys: String[]) => {

if (!response.ok) {
const errorData = await response.text();
if (errorData.includes("Invalid proxy server token passed")) {
throw new Error("Invalid proxy server token passed");
}
handleError(errorData);
throw new Error("Network response was not ok");
}
@@ -3149,6 +3152,49 @@ export const teamUpdateCall = async (
}
};

/**
* Patch update a model
*
* @param accessToken
* @param formValues
* @returns
*/
export const modelPatchUpdateCall = async (
accessToken: string,
formValues: Record<string, any>, // Assuming formValues is an object
modelId: string
) => {
try {
console.log("Form Values in modelUpateCall:", formValues); // Log the form values before making the API call

const url = proxyBaseUrl ? `${proxyBaseUrl}/model/${modelId}/update` : `/model/${modelId}/update`;
const response = await fetch(url, {
method: "PATCH",
headers: {
[globalLitellmHeaderName]: `Bearer ${accessToken}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
...formValues, // Include formValues in the request body
}),
});

if (!response.ok) {
const errorData = await response.text();
handleError(errorData);
console.error("Error update from the server:", errorData);
throw new Error("Network response was not ok");
}
const data = await response.json();
console.log("Update model Response:", data);
return data;
// Handle success - you might want to update some state or UI based on the created key
} catch (error) {
console.error("Failed to update model:", error);
throw error;
}
};

export const modelUpdateCall = async (
accessToken: string,
formValues: Record<string, any> // Assuming formValues is an object
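The new `modelPatchUpdateCall` helper above issues a `PATCH` to `/model/{model_id}/update` with whatever partial payload it is given. A minimal usage sketch, assuming an existing `accessToken` and model id; the `renameModel` wrapper and the payload fields shown are illustrative, not part of the change:

```typescript
// Sketch only: call the new PATCH helper with a partial update payload.
import { modelPatchUpdateCall } from "./networking";

async function renameModel(accessToken: string, modelId: string) {
  const updateData = {
    model_name: "gpt-4o-mini-public",                 // assumed field for illustration
    litellm_params: { model: "openai/gpt-4o-mini" },  // assumed field for illustration
  };
  // Sends PATCH /model/{modelId}/update with the partial payload above
  const updated = await modelPatchUpdateCall(accessToken, updateData, modelId);
  console.log("patched model info:", updated);
}
```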
@@ -7,7 +7,8 @@ import {
getProxyUISettings,
Organization,
organizationListCall,
DEFAULT_ORGANIZATION
DEFAULT_ORGANIZATION,
keyInfoCall
} from "./networking";
import { fetchTeams } from "./common_components/fetch_teams";
import { Grid, Col, Card, Text, Title } from "@tremor/react";
@@ -192,6 +193,7 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
null,
null
);


setUserSpendData(response["user_info"]);
console.log(`userSpendData: ${JSON.stringify(userSpendData)}`)
@@ -238,8 +240,11 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
"userModels" + userID,
JSON.stringify(available_model_names)
);
} catch (error) {
} catch (error: any) {
console.error("There was an error fetching the data", error);
if (error.message.includes("Invalid proxy server token passed")) {
gotoLogin();
}
// Optionally, update your UI to reflect the error state here as well
}
};
@@ -249,6 +254,24 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
}
}, [userID, token, accessToken, keys, userRole]);


useEffect(() => {
// check key health - if it's invalid, redirect to login
if (accessToken) {
const fetchKeyInfo = async () => {
try {
const keyInfo = await keyInfoCall(accessToken, [accessToken]);
console.log("keyInfo: ", keyInfo);
} catch (error: any) {
if (error.message.includes("Invalid proxy server token passed")) {
gotoLogin();
}
}
}
fetchKeyInfo();
}
}, [accessToken]);

useEffect(() => {
console.log(`currentOrg: ${JSON.stringify(currentOrg)}, accessToken: ${accessToken}, userID: ${userID}, userRole: ${userRole}`)
if (accessToken) {
@@ -295,24 +318,68 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
)
}


if (token == null) {
// user is not logged in as yet
console.log("All cookies before redirect:", document.cookie);

function gotoLogin() {
// Clear token cookies using the utility function
clearTokenCookies();

const url = proxyBaseUrl
? `${proxyBaseUrl}/sso/key/generate`
: `/sso/key/generate`;


console.log("Full URL:", url);
window.location.href = url;
window.location.href = url;

return null;
} else if (accessToken == null) {
}

if (token == null) {
// user is not logged in as yet
console.log("All cookies before redirect:", document.cookie);

// Clear token cookies using the utility function
gotoLogin();
return null;
} else {
// Check if token is expired
try {
const decoded = jwtDecode(token) as { [key: string]: any };
console.log("Decoded token:", decoded);
const expTime = decoded.exp;
const currentTime = Math.floor(Date.now() / 1000);

if (expTime && currentTime >= expTime) {
console.log("Token expired, redirecting to login");

// Clear token cookies
clearTokenCookies();

const url = proxyBaseUrl
? `${proxyBaseUrl}/sso/key/generate`
: `/sso/key/generate`;

console.log("Full URL for expired token:", url);
window.location.href = url;

return null;
}
} catch (error) {
console.error("Error decoding token:", error);
// If there's an error decoding the token, consider it invalid
clearTokenCookies();

const url = proxyBaseUrl
? `${proxyBaseUrl}/sso/key/generate`
: `/sso/key/generate`;

console.log("Full URL after token decode error:", url);
window.location.href = url;

return null;
}

if (accessToken == null) {
return null;
}
}

if (userID == null) {
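The expiry check above decodes the session JWT and compares its `exp` claim against the current Unix time, treating undecodable tokens as invalid. A minimal sketch of the same check, assuming the `jwt-decode` package (which exports `jwtDecode`) is what the dashboard relies on; `isTokenExpired` is a hypothetical helper name:

```typescript
// Sketch only: mirror the exp-claim comparison from the hunk above.
import { jwtDecode } from "jwt-decode";

export function isTokenExpired(token: string): boolean {
  try {
    const decoded = jwtDecode(token) as { exp?: number };
    const currentTime = Math.floor(Date.now() / 1000);
    // A missing exp claim is treated as "not expired", matching the guard above
    return !!decoded.exp && currentTime >= decoded.exp;
  } catch {
    // A token that cannot be decoded is treated as invalid/expired
    return true;
  }
}
```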
ui/litellm-dashboard/src/components/user_edit_view.tsx (new file, 168 lines added)
@@ -0,0 +1,168 @@
import React from "react";
import { Form, InputNumber, Select, Tooltip } from "antd";
import { TextInput, Textarea, SelectItem } from "@tremor/react";
import { Button } from "@tremor/react";
import { getModelDisplayName } from "./key_team_helpers/fetch_available_models_team_key";
import { all_admin_roles } from "../utils/roles";
import { InfoCircleOutlined } from "@ant-design/icons";
interface UserEditViewProps {
userData: any;
onCancel: () => void;
onSubmit: (values: any) => void;
teams: any[] | null;
accessToken: string | null;
userID: string | null;
userRole: string | null;
userModels: string[];
possibleUIRoles: Record<string, Record<string, string>> | null;
}

export function UserEditView({
userData,
onCancel,
onSubmit,
teams,
accessToken,
userID,
userRole,
userModels,
possibleUIRoles,
}: UserEditViewProps) {
const [form] = Form.useForm();

// Set initial form values
React.useEffect(() => {
form.setFieldsValue({
user_id: userData.user_id,
user_email: userData.user_info?.user_email,
user_role: userData.user_info?.user_role,
models: userData.user_info?.models || [],
max_budget: userData.user_info?.max_budget,
metadata: userData.user_info?.metadata ? JSON.stringify(userData.user_info.metadata, null, 2) : undefined,
});
}, [userData, form]);

const handleSubmit = (values: any) => {
// Convert metadata back to an object if it exists and is a string
if (values.metadata && typeof values.metadata === "string") {
try {
values.metadata = JSON.parse(values.metadata);
} catch (error) {
console.error("Error parsing metadata JSON:", error);
return;
}
}

onSubmit(values);
};

return (
<Form
form={form}
onFinish={handleSubmit}
layout="vertical"
>
<Form.Item
label="User ID"
name="user_id"
>
<TextInput disabled />
</Form.Item>

<Form.Item
label="Email"
name="user_email"
>
<TextInput />
</Form.Item>

<Form.Item label={
<span>
Global Proxy Role{' '}
<Tooltip title="This is the role that the user will globally on the proxy. This role is independent of any team/org specific roles.">
<InfoCircleOutlined/>
</Tooltip>
</span>
}
name="user_role">
<Select>
{possibleUIRoles &&
Object.entries(possibleUIRoles).map(
([role, { ui_label, description }]) => (
<SelectItem key={role} value={role} title={ui_label}>
<div className="flex">
{ui_label}{" "}
<p
className="ml-2"
style={{ color: "gray", fontSize: "12px" }}
>
{description}
</p>
</div>
</SelectItem>
),
)}
</Select>
</Form.Item>

<Form.Item
label={
<span>
Personal Models{' '}
<Tooltip title="Select which models this user can access outside of team-scope. Choose 'All Proxy Models' to grant access to all models available on the proxy.">
<InfoCircleOutlined style={{ marginLeft: '4px' }} />
</Tooltip>
</span>
}
name="models"
>
<Select
mode="multiple"
placeholder="Select models"
style={{ width: "100%" }}
disabled={!all_admin_roles.includes(userData.user_info?.user_role || "")}

>
<Select.Option key="all-proxy-models" value="all-proxy-models">
All Proxy Models
</Select.Option>
{userModels.map((model) => (
<Select.Option key={model} value={model}>
{getModelDisplayName(model)}
</Select.Option>
))}
</Select>
</Form.Item>

<Form.Item
label="Max Budget (USD)"
name="max_budget"
>
<InputNumber
step={0.01}
precision={2}
style={{ width: "100%" }}
/>
</Form.Item>

<Form.Item
label="Metadata"
name="metadata"
>
<Textarea
rows={4}
placeholder="Enter metadata as JSON"
/>
</Form.Item>

<div className="flex justify-end space-x-2">
<Button variant="secondary" onClick={onCancel}>
Cancel
</Button>
<Button type="submit">
Save Changes
</Button>
</div>
</Form>
);
}
Some files were not shown because too many files have changed in this diff.