Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)

Merge branch 'main' into litellm_dynamic_tpm_limits
Commit 961e7ac95d: 65 changed files with 689 additions and 1186 deletions
@@ -2,9 +2,9 @@ import Tabs from '@theme/Tabs';
 import TabItem from '@theme/TabItem';
 
 # Anthropic
-LiteLLM supports
+LiteLLM supports all anthropic models.
 
-- `claude-3.5`
+- `claude-3.5` (`claude-3-5-sonnet-20240620`)
 - `claude-3` (`claude-3-haiku-20240307`, `claude-3-opus-20240229`, `claude-3-sonnet-20240229`)
 - `claude-2`
 - `claude-2.1`
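
A minimal usage sketch for the models listed in this doc (not part of the diff; assumes ANTHROPIC_API_KEY is exported and uses the public litellm.completion API):

import os
import litellm

# Sketch only: assumes ANTHROPIC_API_KEY is already set in the environment.
assert os.environ.get("ANTHROPIC_API_KEY"), "export ANTHROPIC_API_KEY first"

response = litellm.completion(
    model="claude-3-5-sonnet-20240620",  # any model name from the list above
    messages=[{"role": "user", "content": "Hello, how are you?"}],
)
print(response.choices[0].message.content)
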
@@ -24,6 +24,7 @@ import litellm.types
 from litellm._logging import verbose_logger, verbose_proxy_logger
 from litellm.caching import DualCache
 from litellm.integrations.custom_logger import CustomLogger
+from litellm.litellm_core_utils.litellm_logging import Logging
 from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler
 from litellm.proxy._types import AlertType, CallInfo, UserAPIKeyAuth, WebhookEvent
 from litellm.types.router import LiteLLM_Params
@@ -229,7 +230,7 @@ class SlackAlerting(CustomLogger):
         "db_exceptions",
     ]
 
-    def _add_langfuse_trace_id_to_alert(
+    async def _add_langfuse_trace_id_to_alert(
         self,
         request_data: Optional[dict] = None,
     ) -> Optional[str]:
@@ -242,21 +243,19 @@ class SlackAlerting(CustomLogger):
         -> litellm_call_id
         """
         # do nothing for now
-        if request_data is not None:
-            trace_id = None
-            if (
-                request_data.get("metadata", {}).get("existing_trace_id", None)
-                is not None
-            ):
-                trace_id = request_data["metadata"]["existing_trace_id"]
-            elif request_data.get("metadata", {}).get("trace_id", None) is not None:
-                trace_id = request_data["metadata"]["trace_id"]
-            elif request_data.get("litellm_logging_obj", None) is not None and hasattr(
-                request_data["litellm_logging_obj"], "model_call_details"
-            ):
-                trace_id = request_data["litellm_logging_obj"].model_call_details[
-                    "litellm_call_id"
-                ]
+        if (
+            request_data is not None
+            and request_data.get("litellm_logging_obj", None) is not None
+        ):
+            trace_id: Optional[str] = None
+            litellm_logging_obj: Logging = request_data["litellm_logging_obj"]
+
+            for _ in range(3):
+                trace_id = litellm_logging_obj._get_trace_id(service_name="langfuse")
+                if trace_id is not None:
+                    break
+                await asyncio.sleep(3)  # wait 3s before retrying for trace id
+
             if litellm.litellm_core_utils.litellm_logging.langFuseLogger is not None:
                 base_url = (
                     litellm.litellm_core_utils.litellm_logging.langFuseLogger.Langfuse.base_url
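
For orientation, a rough standalone sketch of what this retry is doing (hypothetical helper, not part of the diff; the {base_url}/trace/{trace_id} layout is an assumption about Langfuse trace URLs):

import asyncio
from typing import Optional

async def _poll_for_langfuse_trace_url(litellm_logging_obj, base_url: str) -> Optional[str]:
    # Langfuse logs the trace asynchronously, so the trace id may not be
    # cached yet when the alert fires; poll a few times before giving up.
    for _ in range(3):
        trace_id = litellm_logging_obj._get_trace_id(service_name="langfuse")
        if trace_id is not None:
            return f"{base_url}/trace/{trace_id}"  # assumed Langfuse trace URL layout
        await asyncio.sleep(3)  # same 3s back-off as in the hunk above
    return None
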
@@ -645,7 +644,7 @@ class SlackAlerting(CustomLogger):
             )
 
         if "langfuse" in litellm.success_callback:
-            langfuse_url = self._add_langfuse_trace_id_to_alert(
+            langfuse_url = await self._add_langfuse_trace_id_to_alert(
                 request_data=request_data,
             )
 
@@ -10,7 +10,7 @@ import sys
 import time
 import traceback
 import uuid
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Literal, Optional
 
 import litellm
 from litellm import (
@@ -19,7 +19,8 @@ from litellm import (
     turn_off_message_logging,
     verbose_logger,
 )
-from litellm.caching import DualCache, S3Cache
+from litellm.caching import InMemoryCache, S3Cache, DualCache
 from litellm.integrations.custom_logger import CustomLogger
 from litellm.litellm_core_utils.redact_messages import (
     redact_message_input_output_from_logging,
@@ -111,7 +112,25 @@ additional_details: Optional[Dict[str, str]] = {}
 local_cache: Optional[Dict[str, str]] = {}
 last_fetched_at = None
 last_fetched_at_keys = None
 
 
 ####
+class ServiceTraceIDCache:
+    def __init__(self) -> None:
+        self.cache = InMemoryCache()
+
+    def get_cache(self, litellm_call_id: str, service_name: str) -> Optional[str]:
+        key_name = "{}:{}".format(service_name, litellm_call_id)
+        response = self.cache.get_cache(key=key_name)
+        return response
+
+    def set_cache(self, litellm_call_id: str, service_name: str, trace_id: str) -> None:
+        key_name = "{}:{}".format(service_name, litellm_call_id)
+        self.cache.set_cache(key=key_name, value=trace_id)
+        return None
+
+
+in_memory_trace_id_cache = ServiceTraceIDCache()
+
+
 class Logging:
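
A quick illustrative round trip with the new cache (values are made up, just to show the intended contract):

# Illustrative only: ids below are hypothetical.
cache = ServiceTraceIDCache()
cache.set_cache(
    litellm_call_id="1234-abcd",   # hypothetical litellm call id
    service_name="langfuse",
    trace_id="trace-5678",         # hypothetical langfuse trace id
)
assert cache.get_cache(litellm_call_id="1234-abcd", service_name="langfuse") == "trace-5678"
assert cache.get_cache(litellm_call_id="unknown-id", service_name="langfuse") is None
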
@@ -155,7 +174,7 @@ class Logging:
                 new_messages.append({"role": "user", "content": m})
             messages = new_messages
         self.model = model
-        self.messages = messages
+        self.messages = copy.deepcopy(messages)
         self.stream = stream
         self.start_time = start_time  # log the call start time
         self.call_type = call_type
@ -245,10 +264,17 @@ class Logging:
|
||||||
if headers is None:
|
if headers is None:
|
||||||
headers = {}
|
headers = {}
|
||||||
data = additional_args.get("complete_input_dict", {})
|
data = additional_args.get("complete_input_dict", {})
|
||||||
api_base = additional_args.get("api_base", "")
|
api_base = str(additional_args.get("api_base", ""))
|
||||||
self.model_call_details["litellm_params"]["api_base"] = str(
|
if "key=" in api_base:
|
||||||
api_base
|
# Find the position of "key=" in the string
|
||||||
) # used for alerting
|
key_index = api_base.find("key=") + 4
|
||||||
|
# Mask the last 5 characters after "key="
|
||||||
|
masked_api_base = (
|
||||||
|
api_base[:key_index] + "*" * 5 + api_base[key_index + 5 :]
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
masked_api_base = api_base
|
||||||
|
self.model_call_details["litellm_params"]["api_base"] = masked_api_base
|
||||||
masked_headers = {
|
masked_headers = {
|
||||||
k: (
|
k: (
|
||||||
(v[:-44] + "*" * 44)
|
(v[:-44] + "*" * 44)
|
||||||
|
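
To make the new masking rule concrete, a standalone sketch with a made-up URL (the helper name and sample values are illustrative, not from the diff):

def mask_key_in_api_base(api_base: str) -> str:
    # Mask the 5 characters that immediately follow "key=" so a raw API key
    # never ends up in model_call_details or alerts.
    api_base = str(api_base)
    if "key=" in api_base:
        key_index = api_base.find("key=") + 4
        return api_base[:key_index] + "*" * 5 + api_base[key_index + 5 :]
    return api_base

print(mask_key_in_api_base("https://example-llm-api.com/v1/generate?key=abcde12345"))
# -> https://example-llm-api.com/v1/generate?key=*****12345
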
@@ -821,7 +847,7 @@ class Logging:
                         langfuse_secret=self.langfuse_secret,
                         langfuse_host=self.langfuse_host,
                     )
-                    langFuseLogger.log_event(
+                    _response = langFuseLogger.log_event(
                         kwargs=kwargs,
                         response_obj=result,
                         start_time=start_time,
@@ -829,6 +855,14 @@ class Logging:
                         user_id=kwargs.get("user", None),
                         print_verbose=print_verbose,
                     )
+                    if _response is not None and isinstance(_response, dict):
+                        _trace_id = _response.get("trace_id", None)
+                        if _trace_id is not None:
+                            in_memory_trace_id_cache.set_cache(
+                                litellm_call_id=self.litellm_call_id,
+                                service_name="langfuse",
+                                trace_id=_trace_id,
+                            )
                 if callback == "datadog":
                     global dataDogLogger
                     verbose_logger.debug("reaches datadog for success logging!")
@@ -1607,7 +1641,7 @@ class Logging:
                         langfuse_secret=self.langfuse_secret,
                         langfuse_host=self.langfuse_host,
                     )
-                    langFuseLogger.log_event(
+                    _response = langFuseLogger.log_event(
                         start_time=start_time,
                         end_time=end_time,
                         response_obj=None,
@@ -1617,6 +1651,14 @@ class Logging:
                         level="ERROR",
                         kwargs=self.model_call_details,
                     )
+                    if _response is not None and isinstance(_response, dict):
+                        _trace_id = _response.get("trace_id", None)
+                        if _trace_id is not None:
+                            in_memory_trace_id_cache.set_cache(
+                                litellm_call_id=self.litellm_call_id,
+                                service_name="langfuse",
+                                trace_id=_trace_id,
+                            )
                 if callback == "traceloop":
                     traceloopLogger.log_event(
                         start_time=start_time,
@@ -1721,6 +1763,24 @@ class Logging:
                 )
             )
 
+    def _get_trace_id(self, service_name: Literal["langfuse"]) -> Optional[str]:
+        """
+        For the given service (e.g. langfuse), return the trace_id actually logged.
+
+        Used for constructing the url in slack alerting.
+
+        Returns:
+            - str: The logged trace id
+            - None: If trace id not yet emitted.
+        """
+        trace_id: Optional[str] = None
+        if service_name == "langfuse":
+            trace_id = in_memory_trace_id_cache.get_cache(
+                litellm_call_id=self.litellm_call_id, service_name=service_name
+            )
+
+        return trace_id
+
+
 def set_callbacks(callback_list, function_id=None):
     """
litellm/proxy/_experimental/out/404.html, litellm/proxy/_experimental/out/model_hub.html and litellm/proxy/_experimental/out/onboarding.html: new files (1 line each); "File diff suppressed because one or more lines are too long".

The remaining files in this group are regenerated Admin UI build output (minified Next.js page chunks, the webpack runtime, prerendered HTML and RSC payload .txt files). Several of these diffs are also suppressed by the viewer as too long; in the ones that are shown, the recoverable changes are:

- The chunk that appears to be the onboarding/sign-up page bundle is replaced (old minified chunk removed, new chunk added). The only visible functional change in the new bundle is that it reads the "token" value from document.cookie instead of taking it from the ?token= query parameter.
- Hashed asset references are updated throughout: chunk 777-71fb78fdb4897cc3.js becomes 777-f76791513e294b30.js, app/page-626098dc8320c801.js becomes app/page-42b04008af7da690.js, model_hub/page-4cb65c32467214b5.js becomes model_hub/page-ba7819b59161aa64.js, onboarding/page-664c7288e11fff5a.js becomes onboarding/page-fd30ae439831db99.js, webpack-6f7793f21bbb2fbe.js becomes webpack-a8fd417ac0c6c8a5.js, and css/b65d5698d1a1958d.css becomes css/0f6908625573deae.css.
- The Next.js buildId changes from S9_6IC27HNWjJtr-LNaAO to DahySukItzAH9ZoOiMmQB in the prerendered HTML and in each RSC payload.
- The module id referenced by the index page RSC payload changes from 3:I[68101,...] to 3:I[48951,...]; the model_hub and onboarding payloads keep their module ids and only pick up the new chunk hashes above.

@@ -188,6 +188,9 @@ class LiteLLMRoutes(enum.Enum):
         # audio transcription
         "/audio/transcriptions",
         "/v1/audio/transcriptions",
+        # audio Speech
+        "/audio/speech",
+        "/v1/audio/speech",
         # moderations
         "/moderations",
         "/v1/moderations",
@@ -165,9 +165,10 @@ from litellm.proxy.secret_managers.aws_secret_manager import (
     load_aws_secret_manager,
 )
 from litellm.proxy.secret_managers.google_kms import load_google_kms
-from litellm.proxy.spend_reporting_endpoints.spend_management_endpoints import (
+from litellm.proxy.spend_tracking.spend_management_endpoints import (
     router as spend_management_router,
 )
+from litellm.proxy.spend_tracking.spend_tracking_utils import get_logging_payload
 from litellm.proxy.utils import (
     DBClient,
     PrismaClient,
@@ -180,7 +181,6 @@ from litellm.proxy.utils import (
     encrypt_value,
     get_error_message_str,
     get_instance_fn,
-    get_logging_payload,
     hash_token,
     html_form,
     missing_keys_html_form,
@@ -7509,6 +7509,12 @@ async def login(request: Request):
         litellm_dashboard_ui += "/ui/"
         import jwt
 
+        if litellm_master_key_hash is None:
+            raise HTTPException(
+                status_code=500,
+                detail={"error": "No master key set, please set LITELLM_MASTER_KEY"},
+            )
+
         jwt_token = jwt.encode(
             {
                 "user_id": user_id,
@@ -7518,11 +7524,13 @@ async def login(request: Request):
                 "login_method": "username_password",
                 "premium_user": premium_user,
             },
-            "secret",
+            litellm_master_key_hash,
            algorithm="HS256",
        )
-        litellm_dashboard_ui += "?userID=" + user_id + "&token=" + jwt_token
-        return RedirectResponse(url=litellm_dashboard_ui, status_code=303)
+        litellm_dashboard_ui += "?userID=" + user_id
+        redirect_response = RedirectResponse(url=litellm_dashboard_ui, status_code=303)
+        redirect_response.set_cookie(key="token", value=jwt_token)
+        return redirect_response
     elif _user_row is not None:
         """
         When sharing invite links
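
The same pattern (sign the UI session token with litellm_master_key_hash instead of the hard-coded "secret", and hand it to the browser as a cookie rather than a query parameter) repeats in the hunks below. A minimal standalone sketch, assuming PyJWT and a FastAPI RedirectResponse; the helper name is hypothetical:

import jwt  # PyJWT
from fastapi.responses import RedirectResponse

def build_ui_redirect(user_id: str, litellm_master_key_hash: str, dashboard_url: str) -> RedirectResponse:
    # Sign the session claims with the master key hash so the token can be
    # verified server-side, instead of signing with a static "secret".
    token = jwt.encode(
        {"user_id": user_id, "login_method": "username_password"},
        litellm_master_key_hash,
        algorithm="HS256",
    )
    response = RedirectResponse(url=f"{dashboard_url}?userID={user_id}", status_code=303)
    # The token now travels in a cookie; it is no longer appended to the URL.
    response.set_cookie(key="token", value=token)
    return response

# Verification side (sketch): jwt.decode(token, litellm_master_key_hash, algorithms=["HS256"])
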
@@ -7571,6 +7579,14 @@ async def login(request: Request):
             litellm_dashboard_ui += "/ui/"
             import jwt
 
+            if litellm_master_key_hash is None:
+                raise HTTPException(
+                    status_code=500,
+                    detail={
+                        "error": "No master key set, please set LITELLM_MASTER_KEY"
+                    },
+                )
+
             jwt_token = jwt.encode(
                 {
                     "user_id": user_id,
@ -7580,11 +7596,15 @@ async def login(request: Request):
|
||||||
"login_method": "username_password",
|
"login_method": "username_password",
|
||||||
"premium_user": premium_user,
|
"premium_user": premium_user,
|
||||||
},
|
},
|
||||||
"secret",
|
litellm_master_key_hash,
|
||||||
algorithm="HS256",
|
algorithm="HS256",
|
||||||
)
|
)
|
||||||
litellm_dashboard_ui += "?userID=" + user_id + "&token=" + jwt_token
|
litellm_dashboard_ui += "?userID=" + user_id
|
||||||
return RedirectResponse(url=litellm_dashboard_ui, status_code=303)
|
redirect_response = RedirectResponse(
|
||||||
|
url=litellm_dashboard_ui, status_code=303
|
||||||
|
)
|
||||||
|
redirect_response.set_cookie(key="token", value=jwt_token)
|
||||||
|
return redirect_response
|
||||||
else:
|
else:
|
||||||
raise ProxyException(
|
raise ProxyException(
|
||||||
message=f"Invalid credentials used to access UI. Passed in username: {username}, passed in password: {password}.\nNot valid credentials for {username}",
|
message=f"Invalid credentials used to access UI. Passed in username: {username}, passed in password: {password}.\nNot valid credentials for {username}",
|
||||||
|
@ -7695,6 +7715,12 @@ async def onboarding(invite_link: str):
|
||||||
litellm_dashboard_ui += "/ui/onboarding"
|
litellm_dashboard_ui += "/ui/onboarding"
|
||||||
import jwt
|
import jwt
|
||||||
|
|
||||||
|
if litellm_master_key_hash is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500,
|
||||||
|
detail={"error": "No master key set, please set LITELLM_MASTER_KEY"},
|
||||||
|
)
|
||||||
|
|
||||||
jwt_token = jwt.encode(
|
jwt_token = jwt.encode(
|
||||||
{
|
{
|
||||||
"user_id": user_obj.user_id,
|
"user_id": user_obj.user_id,
|
||||||
|
@ -7704,7 +7730,7 @@ async def onboarding(invite_link: str):
|
||||||
"login_method": "username_password",
|
"login_method": "username_password",
|
||||||
"premium_user": premium_user,
|
"premium_user": premium_user,
|
||||||
},
|
},
|
||||||
"secret",
|
litellm_master_key_hash,
|
||||||
algorithm="HS256",
|
algorithm="HS256",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -8115,6 +8141,12 @@ async def auth_callback(request: Request):
|
||||||
|
|
||||||
import jwt
|
import jwt
|
||||||
|
|
||||||
|
if litellm_master_key_hash is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500,
|
||||||
|
detail={"error": "No master key set, please set LITELLM_MASTER_KEY"},
|
||||||
|
)
|
||||||
|
|
||||||
jwt_token = jwt.encode(
|
jwt_token = jwt.encode(
|
||||||
{
|
{
|
||||||
"user_id": user_id,
|
"user_id": user_id,
|
||||||
|
@ -8124,11 +8156,13 @@ async def auth_callback(request: Request):
|
||||||
"login_method": "sso",
|
"login_method": "sso",
|
||||||
"premium_user": premium_user,
|
"premium_user": premium_user,
|
||||||
},
|
},
|
||||||
"secret",
|
litellm_master_key_hash,
|
||||||
algorithm="HS256",
|
algorithm="HS256",
|
||||||
)
|
)
|
||||||
litellm_dashboard_ui += "?userID=" + user_id + "&token=" + jwt_token
|
litellm_dashboard_ui += "?userID=" + user_id
|
||||||
return RedirectResponse(url=litellm_dashboard_ui)
|
redirect_response = RedirectResponse(url=litellm_dashboard_ui, status_code=303)
|
||||||
|
redirect_response.set_cookie(key="token", value=jwt_token)
|
||||||
|
return redirect_response
|
||||||
|
|
||||||
|
|
||||||
#### INVITATION MANAGEMENT ####
|
#### INVITATION MANAGEMENT ####
|
||||||
|
|
|
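Note: the hunks above move the dashboard session token from a hardcoded "secret" and a `?token=` query parameter to a JWT signed with `litellm_master_key_hash` and delivered in a `token` cookie. The snippet below is an illustrative sketch only, not code from this commit: it assumes PyJWT and a FastAPI request object, and shows one way a UI handler could verify such a cookie, using the same key and algorithm as the `jwt.encode(..., algorithm="HS256")` calls above.

# Illustrative sketch (assumption: PyJWT installed; `litellm_master_key_hash`
# stands in for however the deployment stores the hashed master key).
import jwt
from fastapi import HTTPException, Request


def verify_ui_session(request: Request, litellm_master_key_hash: str) -> dict:
    # read the cookie the login/auth_callback handlers set above
    token = request.cookies.get("token")
    if token is None:
        raise HTTPException(status_code=401, detail={"error": "missing session cookie"})
    try:
        # decode with the same secret and algorithm used when the token was signed
        return jwt.decode(token, litellm_master_key_hash, algorithms=["HS256"])
    except jwt.PyJWTError:
        raise HTTPException(status_code=401, detail={"error": "invalid or expired session token"})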
@@ -1,13 +1,14 @@
 #### SPEND MANAGEMENT #####
-from typing import Optional, List
+from datetime import datetime, timedelta, timezone
+from typing import List, Optional
+
+import fastapi
+from fastapi import APIRouter, Depends, Header, HTTPException, Request, status
+
 import litellm
 from litellm._logging import verbose_proxy_logger
-from datetime import datetime, timedelta, timezone
-from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
-import fastapi
-from fastapi import Depends, Request, APIRouter, Header, status
-from fastapi import HTTPException
 from litellm.proxy._types import *
+from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
 
 router = APIRouter()
 
@@ -227,7 +228,7 @@ async def get_global_activity(
     start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
     end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
 
-    from litellm.proxy.proxy_server import prisma_client, llm_router
+    from litellm.proxy.proxy_server import llm_router, prisma_client
 
     try:
         if prisma_client is None:
@@ -355,7 +356,7 @@ async def get_global_activity_model(
     start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
     end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
 
-    from litellm.proxy.proxy_server import prisma_client, llm_router, premium_user
+    from litellm.proxy.proxy_server import llm_router, premium_user, prisma_client
 
     try:
         if prisma_client is None:
@@ -500,7 +501,7 @@ async def get_global_activity_exceptions_per_deployment(
     start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
     end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
 
-    from litellm.proxy.proxy_server import prisma_client, llm_router, premium_user
+    from litellm.proxy.proxy_server import llm_router, premium_user, prisma_client
 
     try:
         if prisma_client is None:
@@ -634,7 +635,7 @@ async def get_global_activity_exceptions(
     start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
     end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
 
-    from litellm.proxy.proxy_server import prisma_client, llm_router
+    from litellm.proxy.proxy_server import llm_router, prisma_client
 
     try:
         if prisma_client is None:
@@ -739,7 +740,7 @@ async def get_global_spend_provider(
     start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
     end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
 
-    from litellm.proxy.proxy_server import prisma_client, llm_router
+    from litellm.proxy.proxy_server import llm_router, prisma_client
 
     try:
         if prisma_client is None:
@@ -1091,7 +1092,6 @@ async def global_view_spend_tags(
     """
-
     from enterprise.utils import ui_get_spend_by_tags
 
     from litellm.proxy.proxy_server import prisma_client
 
     try:
litellm/proxy/spend_tracking/spend_tracking_utils.py — 125 lines, new file
@@ -0,0 +1,125 @@
+import json
+import traceback
+from typing import Optional
+
+import litellm
+from litellm._logging import verbose_proxy_logger
+from litellm.proxy._types import SpendLogsMetadata, SpendLogsPayload
+from litellm.proxy.utils import hash_token
+
+
+def get_logging_payload(
+    kwargs, response_obj, start_time, end_time, end_user_id: Optional[str]
+) -> SpendLogsPayload:
+    from pydantic import Json
+
+    from litellm.proxy._types import LiteLLM_SpendLogs
+
+    verbose_proxy_logger.debug(
+        f"SpendTable: get_logging_payload - kwargs: {kwargs}\n\n"
+    )
+
+    if kwargs is None:
+        kwargs = {}
+    # standardize this function to be used across, s3, dynamoDB, langfuse logging
+    litellm_params = kwargs.get("litellm_params", {})
+    metadata = (
+        litellm_params.get("metadata", {}) or {}
+    )  # if litellm_params['metadata'] == None
+    completion_start_time = kwargs.get("completion_start_time", end_time)
+    call_type = kwargs.get("call_type")
+    cache_hit = kwargs.get("cache_hit", False)
+    usage = response_obj["usage"]
+    if type(usage) == litellm.Usage:
+        usage = dict(usage)
+    id = response_obj.get("id", kwargs.get("litellm_call_id"))
+    api_key = metadata.get("user_api_key", "")
+    if api_key is not None and isinstance(api_key, str) and api_key.startswith("sk-"):
+        # hash the api_key
+        api_key = hash_token(api_key)
+
+    _model_id = metadata.get("model_info", {}).get("id", "")
+    _model_group = metadata.get("model_group", "")
+
+    request_tags = (
+        json.dumps(metadata.get("tags", []))
+        if isinstance(metadata.get("tags", []), list)
+        else "[]"
+    )
+
+    # clean up litellm metadata
+    clean_metadata = SpendLogsMetadata(
+        user_api_key=None,
+        user_api_key_alias=None,
+        user_api_key_team_id=None,
+        user_api_key_user_id=None,
+        user_api_key_team_alias=None,
+        spend_logs_metadata=None,
+    )
+    if isinstance(metadata, dict):
+        verbose_proxy_logger.debug(
+            "getting payload for SpendLogs, available keys in metadata: "
+            + str(list(metadata.keys()))
+        )
+
+        # Filter the metadata dictionary to include only the specified keys
+        clean_metadata = SpendLogsMetadata(
+            **{  # type: ignore
+                key: metadata[key]
+                for key in SpendLogsMetadata.__annotations__.keys()
+                if key in metadata
+            }
+        )
+
+    if litellm.cache is not None:
+        cache_key = litellm.cache.get_cache_key(**kwargs)
+    else:
+        cache_key = "Cache OFF"
+    if cache_hit is True:
+        import time
+
+        id = f"{id}_cache_hit{time.time()}"  # SpendLogs does not allow duplicate request_id
+
+    try:
+        payload: SpendLogsPayload = SpendLogsPayload(
+            request_id=str(id),
+            call_type=call_type or "",
+            api_key=str(api_key),
+            cache_hit=str(cache_hit),
+            startTime=start_time,
+            endTime=end_time,
+            completionStartTime=completion_start_time,
+            model=kwargs.get("model", "") or "",
+            user=kwargs.get("litellm_params", {})
+            .get("metadata", {})
+            .get("user_api_key_user_id", "")
+            or "",
+            team_id=kwargs.get("litellm_params", {})
+            .get("metadata", {})
+            .get("user_api_key_team_id", "")
+            or "",
+            metadata=json.dumps(clean_metadata),
+            cache_key=cache_key,
+            spend=kwargs.get("response_cost", 0),
+            total_tokens=usage.get("total_tokens", 0),
+            prompt_tokens=usage.get("prompt_tokens", 0),
+            completion_tokens=usage.get("completion_tokens", 0),
+            request_tags=request_tags,
+            end_user=end_user_id or "",
+            api_base=litellm_params.get("api_base", ""),
+            model_group=_model_group,
+            model_id=_model_id,
+        )
+
+        verbose_proxy_logger.debug(
+            "SpendTable: created payload - payload: %s\n\n", payload
+        )
+
+        return payload
+    except Exception as e:
+        verbose_proxy_logger.error(
+            "Error creating spendlogs object - {}\n{}".format(
+                str(e), traceback.format_exc()
+            )
+        )
+        raise e
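For context, a rough, hypothetical usage sketch of the new helper above. The kwargs and response values are made up for illustration; real callers pass whatever the litellm logging callback provides.

# Illustrative sketch only (not part of this commit); values are placeholders.
from datetime import datetime, timedelta

from litellm.proxy.spend_tracking.spend_tracking_utils import get_logging_payload

start = datetime.now() - timedelta(seconds=2)
end = datetime.now()
payload = get_logging_payload(
    kwargs={
        "model": "gpt-3.5-turbo",
        "litellm_params": {"metadata": {"model_group": "gpt-3.5-turbo"}},
        "response_cost": 0.0001,
        "litellm_call_id": "1234",
        "call_type": "acompletion",
    },
    response_obj={
        "id": "chatcmpl-123",
        "usage": {"total_tokens": 30, "prompt_tokens": 20, "completion_tokens": 10},
    },
    start_time=start,
    end_time=end,
    end_user_id="end-user-1",
)
print(payload["spend"], payload["model"], payload["request_tags"])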
@@ -473,7 +473,7 @@ class ProxyLogging:
             alerting_metadata = {}
             if request_data is not None:
 
-                _url = self.slack_alerting_instance._add_langfuse_trace_id_to_alert(
+                _url = await self.slack_alerting_instance._add_langfuse_trace_id_to_alert(
                     request_data=request_data
                 )
 
@@ -2040,121 +2040,6 @@ def hash_token(token: str):
     return hashed_token
 
 
-def get_logging_payload(
-    kwargs, response_obj, start_time, end_time, end_user_id: Optional[str]
-) -> SpendLogsPayload:
-    from pydantic import Json
-
-    from litellm.proxy._types import LiteLLM_SpendLogs
-
-    verbose_proxy_logger.debug(
-        f"SpendTable: get_logging_payload - kwargs: {kwargs}\n\n"
-    )
-
-    if kwargs is None:
-        kwargs = {}
-    # standardize this function to be used across, s3, dynamoDB, langfuse logging
-    litellm_params = kwargs.get("litellm_params", {})
-    metadata = (
-        litellm_params.get("metadata", {}) or {}
-    )  # if litellm_params['metadata'] == None
-    completion_start_time = kwargs.get("completion_start_time", end_time)
-    call_type = kwargs.get("call_type")
-    cache_hit = kwargs.get("cache_hit", False)
-    usage = response_obj["usage"]
-    if type(usage) == litellm.Usage:
-        usage = dict(usage)
-    id = response_obj.get("id", kwargs.get("litellm_call_id"))
-    api_key = metadata.get("user_api_key", "")
-    if api_key is not None and isinstance(api_key, str) and api_key.startswith("sk-"):
-        # hash the api_key
-        api_key = hash_token(api_key)
-
-    _model_id = metadata.get("model_info", {}).get("id", "")
-    _model_group = metadata.get("model_group", "")
-
-    # clean up litellm metadata
-    clean_metadata = SpendLogsMetadata(
-        user_api_key=None,
-        user_api_key_alias=None,
-        user_api_key_team_id=None,
-        user_api_key_user_id=None,
-        user_api_key_team_alias=None,
-        spend_logs_metadata=None,
-    )
-    if isinstance(metadata, dict):
-        verbose_proxy_logger.debug(
-            "getting payload for SpendLogs, available keys in metadata: "
-            + str(list(metadata.keys()))
-        )
-
-        # Filter the metadata dictionary to include only the specified keys
-        clean_metadata = SpendLogsMetadata(
-            **{  # type: ignore
-                key: metadata[key]
-                for key in SpendLogsMetadata.__annotations__.keys()
-                if key in metadata
-            }
-        )
-
-    if litellm.cache is not None:
-        cache_key = litellm.cache.get_cache_key(**kwargs)
-    else:
-        cache_key = "Cache OFF"
-    if cache_hit is True:
-        import time
-
-        id = f"{id}_cache_hit{time.time()}"  # SpendLogs does not allow duplicate request_id
-
-    try:
-        payload: SpendLogsPayload = SpendLogsPayload(
-            request_id=str(id),
-            call_type=call_type or "",
-            api_key=str(api_key),
-            cache_hit=str(cache_hit),
-            startTime=start_time,
-            endTime=end_time,
-            completionStartTime=completion_start_time,
-            model=kwargs.get("model", "") or "",
-            user=kwargs.get("litellm_params", {})
-            .get("metadata", {})
-            .get("user_api_key_user_id", "")
-            or "",
-            team_id=kwargs.get("litellm_params", {})
-            .get("metadata", {})
-            .get("user_api_key_team_id", "")
-            or "",
-            metadata=json.dumps(clean_metadata),
-            cache_key=cache_key,
-            spend=kwargs.get("response_cost", 0),
-            total_tokens=usage.get("total_tokens", 0),
-            prompt_tokens=usage.get("prompt_tokens", 0),
-            completion_tokens=usage.get("completion_tokens", 0),
-            request_tags=(
-                json.dumps(metadata.get("tags", []))
-                if isinstance(metadata.get("tags", []), dict)
-                else "[]"
-            ),
-            end_user=end_user_id or "",
-            api_base=litellm_params.get("api_base", ""),
-            model_group=_model_group,
-            model_id=_model_id,
-        )
-
-        verbose_proxy_logger.debug(
-            "SpendTable: created payload - payload: %s\n\n", payload
-        )
-
-        return payload
-    except Exception as e:
-        verbose_proxy_logger.error(
-            "Error creating spendlogs object - {}\n{}".format(
-                str(e), traceback.format_exc()
-            )
-        )
-        raise e
-
-
 def _extract_from_regex(duration: str) -> Tuple[int, str]:
     match = re.match(r"(\d+)(mo|[smhd]?)", duration)
@@ -403,7 +403,11 @@ class Router:
         else:
             litellm.failure_callback = [self.deployment_callback_on_failure]
         print(  # noqa
-            f"Intialized router with Routing strategy: {self.routing_strategy}\n\nRouting fallbacks: {self.fallbacks}\n\nRouting context window fallbacks: {self.context_window_fallbacks}\n\nRouter Redis Caching={self.cache.redis_cache}"
+            f"Intialized router with Routing strategy: {self.routing_strategy}\n\n"
+            f"Routing fallbacks: {self.fallbacks}\n\n"
+            f"Routing content fallbacks: {self.content_policy_fallbacks}\n\n"
+            f"Routing context window fallbacks: {self.context_window_fallbacks}\n\n"
+            f"Router Redis Caching={self.cache.redis_cache}\n"
         )  # noqa
         self.routing_strategy_args = routing_strategy_args
         self.retry_policy: Optional[RetryPolicy] = retry_policy
@@ -4,6 +4,7 @@ import json
 import logging
 import os
 import sys
+from typing import Any
 from unittest.mock import MagicMock, patch
 
 logging.basicConfig(level=logging.DEBUG)
@@ -24,11 +25,21 @@ import pytest
 def langfuse_client():
     import langfuse
 
-    langfuse_client = langfuse.Langfuse(
-        public_key=os.environ["LANGFUSE_PUBLIC_KEY"],
-        secret_key=os.environ["LANGFUSE_SECRET_KEY"],
-        host=None,
+    _langfuse_cache_key = (
+        f"{os.environ['LANGFUSE_PUBLIC_KEY']}-{os.environ['LANGFUSE_SECRET_KEY']}"
     )
+    # use a in memory langfuse client for testing, RAM util on ci/cd gets too high when we init many langfuse clients
+    if _langfuse_cache_key in litellm.in_memory_llm_clients_cache:
+        langfuse_client = litellm.in_memory_llm_clients_cache[_langfuse_cache_key]
+    else:
+        langfuse_client = langfuse.Langfuse(
+            public_key=os.environ["LANGFUSE_PUBLIC_KEY"],
+            secret_key=os.environ["LANGFUSE_SECRET_KEY"],
+            host=None,
+        )
+        litellm.in_memory_llm_clients_cache[_langfuse_cache_key] = langfuse_client
+
+    print("NEW LANGFUSE CLIENT")
 
     with patch(
         "langfuse.Langfuse", MagicMock(return_value=langfuse_client)
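The fixture change above reuses a single Langfuse client per credential pair via `litellm.in_memory_llm_clients_cache` instead of constructing a new client for every test. A generic sketch of that caching pattern, with a stand-in cache dict and factory (both hypothetical, not from this commit):

# Illustrative sketch: `_client_cache` and `make_client` are placeholders.
_client_cache: dict = {}


def get_cached_client(make_client, public_key: str, secret_key: str):
    # one client per credential pair, created lazily and then reused
    cache_key = f"{public_key}-{secret_key}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = make_client(public_key, secret_key)
    return _client_cache[cache_key]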
@@ -1,33 +1,37 @@
 # What is this?
 ## Tests slack alerting on proxy logging object
 
-import sys, json, uuid, random, httpx
+import asyncio
+import io
+import json
 import os
-import io, asyncio
+import random
+import sys
+import time
+import uuid
 from datetime import datetime, timedelta
 from typing import Optional
 
+import httpx
+
 # import logging
 # logging.basicConfig(level=logging.DEBUG)
 sys.path.insert(0, os.path.abspath("../.."))
-from litellm.proxy.utils import ProxyLogging
-from litellm.caching import DualCache, RedisCache
-import litellm
-import pytest
 import asyncio
-from unittest.mock import patch, MagicMock
-from litellm.utils import get_api_base
-from litellm.caching import DualCache
-from litellm.integrations.slack_alerting import SlackAlerting, DeploymentMetrics
-import unittest.mock
-from unittest.mock import AsyncMock
-import pytest
-from litellm.router import AlertingConfig, Router
-from litellm.proxy._types import CallInfo
-from openai import APIError
-from litellm.router import AlertingConfig
-import litellm
 import os
+import unittest.mock
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+from openai import APIError
+
+import litellm
+from litellm.caching import DualCache, RedisCache
+from litellm.integrations.slack_alerting import DeploymentMetrics, SlackAlerting
+from litellm.proxy._types import CallInfo
+from litellm.proxy.utils import ProxyLogging
+from litellm.router import AlertingConfig, Router
+from litellm.utils import get_api_base
 
 
 @pytest.mark.parametrize(
@@ -123,8 +127,8 @@ def test_init():
     print("passed testing slack alerting init")
 
 
-from unittest.mock import patch, AsyncMock
 from datetime import datetime, timedelta
+from unittest.mock import AsyncMock, patch
 
 
 @pytest.fixture
@@ -805,3 +809,53 @@ async def test_alerting():
         pass
     finally:
         await asyncio.sleep(3)
+
+
+@pytest.mark.asyncio
+async def test_langfuse_trace_id():
+    """
+    - Unit test for `_add_langfuse_trace_id_to_alert` function in slack_alerting.py
+    """
+    from litellm.litellm_core_utils.litellm_logging import Logging
+
+    litellm.success_callback = ["langfuse"]
+
+    litellm_logging_obj = Logging(
+        model="gpt-3.5-turbo",
+        messages=[{"role": "user", "content": "hi"}],
+        stream=False,
+        call_type="acompletion",
+        litellm_call_id="1234",
+        start_time=datetime.now(),
+        function_id="1234",
+    )
+
+    litellm.completion(
+        model="gpt-3.5-turbo",
+        messages=[{"role": "user", "content": "Hey how's it going?"}],
+        mock_response="Hey!",
+        litellm_logging_obj=litellm_logging_obj,
+    )
+
+    await asyncio.sleep(3)
+
+    assert litellm_logging_obj._get_trace_id(service_name="langfuse") is not None
+
+    slack_alerting = SlackAlerting(
+        alerting_threshold=32,
+        alerting=["slack"],
+        alert_types=["llm_exceptions"],
+        internal_usage_cache=DualCache(),
+    )
+
+    trace_url = await slack_alerting._add_langfuse_trace_id_to_alert(
+        request_data={"litellm_logging_obj": litellm_logging_obj}
+    )
+
+    assert trace_url is not None
+
+    returned_trace_id = int(trace_url.split("/")[-1])
+
+    assert returned_trace_id == int(
+        litellm_logging_obj._get_trace_id(service_name="langfuse")
+    )
@@ -1,869 +0,0 @@
-import asyncio
-import copy
-import json
-import logging
-import os
-import sys
-from unittest.mock import MagicMock, patch
-
-logging.basicConfig(level=logging.DEBUG)
-sys.path.insert(0, os.path.abspath("../.."))
-
-import litellm
-from litellm import completion
-
-litellm.num_retries = 3
-litellm.success_callback = ["langfuse"]
-os.environ["LANGFUSE_DEBUG"] = "True"
-import time
-
-import pytest
-
-
-@pytest.fixture
-def langfuse_client():
-    import langfuse
-
-    langfuse_client = langfuse.Langfuse(
-        public_key=os.environ["LANGFUSE_PUBLIC_KEY"],
-        secret_key=os.environ["LANGFUSE_SECRET_KEY"],
-        host=None,
-    )
-
-    with patch(
-        "langfuse.Langfuse", MagicMock(return_value=langfuse_client)
-    ) as mock_langfuse_client:
-        yield mock_langfuse_client()
-
-
-def search_logs(log_file_path, num_good_logs=1):
-    """
-    Searches the given log file for logs containing the "/api/public" string.
-
-    Parameters:
-    - log_file_path (str): The path to the log file to be searched.
-
-    Returns:
-    - None
-
-    Raises:
-    - Exception: If there are any bad logs found in the log file.
-    """
-    import re
-
-    print("\n searching logs")
-    bad_logs = []
-    good_logs = []
-    all_logs = []
-    try:
-        with open(log_file_path, "r") as log_file:
-            lines = log_file.readlines()
-            print(f"searching logslines: {lines}")
-            for line in lines:
-                all_logs.append(line.strip())
-                if "/api/public" in line:
-                    print("Found log with /api/public:")
-                    print(line.strip())
-                    print("\n\n")
-                    match = re.search(
-                        r'"POST /api/public/ingestion HTTP/1.1" (\d+) (\d+)',
-                        line,
-                    )
-                    if match:
-                        status_code = int(match.group(1))
-                        print("STATUS CODE", status_code)
-                        if (
-                            status_code != 200
-                            and status_code != 201
-                            and status_code != 207
-                        ):
-                            print("got a BAD log")
-                            bad_logs.append(line.strip())
-                        else:
-                            good_logs.append(line.strip())
-        print("\nBad Logs")
-        print(bad_logs)
-        if len(bad_logs) > 0:
-            raise Exception(f"bad logs, Bad logs = {bad_logs}")
-        assert (
-            len(good_logs) == num_good_logs
-        ), f"Did not get expected number of good logs, expected {num_good_logs}, got {len(good_logs)}. All logs \n {all_logs}"
-        print("\nGood Logs")
-        print(good_logs)
-        if len(good_logs) <= 0:
-            raise Exception(
-                f"There were no Good Logs from Langfuse. No logs with /api/public status 200. \nAll logs:{all_logs}"
-            )
-
-    except Exception as e:
-        raise e
-
-
-def pre_langfuse_setup():
-    """
-    Set up the logging for the 'pre_langfuse_setup' function.
-    """
-    # sends logs to langfuse.log
-    import logging
-
-    # Configure the logging to write to a file
-    logging.basicConfig(filename="langfuse.log", level=logging.DEBUG)
-    logger = logging.getLogger()
-
-    # Add a FileHandler to the logger
-    file_handler = logging.FileHandler("langfuse.log", mode="w")
-    file_handler.setLevel(logging.DEBUG)
-    logger.addHandler(file_handler)
-    return
-
-
-def test_langfuse_logging_async():
-    # this tests time added to make langfuse logging calls, vs just acompletion calls
-    try:
-        pre_langfuse_setup()
-        litellm.set_verbose = True
-
-        # Make 5 calls with an empty success_callback
-        litellm.success_callback = []
-        start_time_empty_callback = asyncio.run(make_async_calls())
-        print("done with no callback test")
-
-        print("starting langfuse test")
-        # Make 5 calls with success_callback set to "langfuse"
-        litellm.success_callback = ["langfuse"]
-        start_time_langfuse = asyncio.run(make_async_calls())
-        print("done with langfuse test")
-
-        # Compare the time for both scenarios
-        print(f"Time taken with success_callback='langfuse': {start_time_langfuse}")
-        print(f"Time taken with empty success_callback: {start_time_empty_callback}")
-
-        # assert the diff is not more than 1 second - this was 5 seconds before the fix
-        assert abs(start_time_langfuse - start_time_empty_callback) < 1
-
-    except litellm.Timeout as e:
-        pass
-    except Exception as e:
-        pytest.fail(f"An exception occurred - {e}")
-
-
-async def make_async_calls(metadata=None, **completion_kwargs):
-    tasks = []
-    for _ in range(5):
-        tasks.append(create_async_task())
-
-    # Measure the start time before running the tasks
-    start_time = asyncio.get_event_loop().time()
-
-    # Wait for all tasks to complete
-    responses = await asyncio.gather(*tasks)
-
-    # Print the responses when tasks return
-    for idx, response in enumerate(responses):
-        print(f"Response from Task {idx + 1}: {response}")
-
-    # Calculate the total time taken
-    total_time = asyncio.get_event_loop().time() - start_time
-
-    return total_time
-
-
-def create_async_task(**completion_kwargs):
-    """
-    Creates an async task for the litellm.acompletion function.
-    This is just the task, but it is not run here.
-    To run the task it must be awaited or used in other asyncio coroutine execution functions like asyncio.gather.
-    Any kwargs passed to this function will be passed to the litellm.acompletion function.
-    By default a standard set of arguments are used for the litellm.acompletion function.
-    """
-    completion_args = {
-        "model": "azure/chatgpt-v-2",
-        "api_version": "2024-02-01",
-        "messages": [{"role": "user", "content": "This is a test"}],
-        "max_tokens": 5,
-        "temperature": 0.7,
-        "timeout": 5,
-        "user": "langfuse_latency_test_user",
-        "mock_response": "It's simple to use and easy to get started",
-    }
-    completion_args.update(completion_kwargs)
-    return asyncio.create_task(litellm.acompletion(**completion_args))
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize("stream", [False, True])
-async def test_langfuse_logging_without_request_response(stream, langfuse_client):
-    try:
-        import uuid
-
-        _unique_trace_name = f"litellm-test-{str(uuid.uuid4())}"
-        litellm.set_verbose = True
-        litellm.turn_off_message_logging = True
-        litellm.success_callback = ["langfuse"]
-        response = await create_async_task(
-            model="gpt-3.5-turbo",
-            stream=stream,
-            metadata={"trace_id": _unique_trace_name},
-        )
-        print(response)
-        if stream:
-            async for chunk in response:
-                print(chunk)
-
-        langfuse_client.flush()
-        await asyncio.sleep(2)
-
-        # get trace with _unique_trace_name
-        trace = langfuse_client.get_generations(trace_id=_unique_trace_name)
-
-        print("trace_from_langfuse", trace)
-
-        _trace_data = trace.data
-
-        assert _trace_data[0].input == {
-            "messages": [{"content": "redacted-by-litellm", "role": "user"}]
-        }
-        assert _trace_data[0].output == {
-            "role": "assistant",
-            "content": "redacted-by-litellm",
-        }
-
-    except Exception as e:
-        pytest.fail(f"An exception occurred - {e}")
-
-
-@pytest.mark.asyncio
-async def test_langfuse_masked_input_output(langfuse_client):
-    """
-    Test that creates a trace with masked input and output
-    """
-    import uuid
-
-    for mask_value in [True, False]:
-        _unique_trace_name = f"litellm-test-{str(uuid.uuid4())}"
-        litellm.set_verbose = True
-        litellm.success_callback = ["langfuse"]
-        response = await create_async_task(
-            model="gpt-3.5-turbo",
-            messages=[{"role": "user", "content": "This is a test"}],
-            metadata={
-                "trace_id": _unique_trace_name,
-                "mask_input": mask_value,
-                "mask_output": mask_value,
-            },
-            mock_response="This is a test response",
-        )
-        print(response)
-        expected_input = (
-            "redacted-by-litellm"
-            if mask_value
-            else {"messages": [{"content": "This is a test", "role": "user"}]}
-        )
-        expected_output = (
-            "redacted-by-litellm"
-            if mask_value
-            else {"content": "This is a test response", "role": "assistant"}
-        )
-        langfuse_client.flush()
-        await asyncio.sleep(2)
-
-        # get trace with _unique_trace_name
-        trace = langfuse_client.get_trace(id=_unique_trace_name)
-        generations = list(
-            reversed(langfuse_client.get_generations(trace_id=_unique_trace_name).data)
-        )
-
-        assert trace.input == expected_input
-        assert trace.output == expected_output
-        assert generations[0].input == expected_input
-        assert generations[0].output == expected_output
-
-
-@pytest.mark.asyncio
-async def test_alangfuse_logging_metadata(langfuse_client):
-    """
-    Test that creates multiple traces, with a varying number of generations and sets various metadata fields
-    Confirms that no metadata that is standard within Langfuse is duplicated in the respective trace or generation metadata
-    For trace continuation certain metadata of the trace is overriden with metadata from the last generation based on the update_trace_keys field
-    Version is set for both the trace and the generation
-    Release is just set for the trace
-    Tags is just set for the trace
-    """
-    import uuid
-
-    litellm.set_verbose = True
-    litellm.success_callback = ["langfuse"]
-
-    trace_identifiers = {}
-    expected_filtered_metadata_keys = {
-        "trace_name",
-        "trace_id",
-        "existing_trace_id",
-        "trace_user_id",
-        "session_id",
-        "tags",
-        "generation_name",
-        "generation_id",
-        "prompt",
-    }
-    trace_metadata = {
-        "trace_actual_metadata_key": "trace_actual_metadata_value"
-    }  # Allows for setting the metadata on the trace
-    run_id = str(uuid.uuid4())
-    session_id = f"litellm-test-session-{run_id}"
-    trace_common_metadata = {
-        "session_id": session_id,
-        "tags": ["litellm-test-tag1", "litellm-test-tag2"],
-        "update_trace_keys": [
-            "output",
-            "trace_metadata",
-        ],  # Overwrite the following fields in the trace with the last generation's output and the trace_user_id
-        "trace_metadata": trace_metadata,
-        "gen_metadata_key": "gen_metadata_value",  # Metadata key that should not be filtered in the generation
-        "trace_release": "litellm-test-release",
-        "version": "litellm-test-version",
-    }
-    for trace_num in range(1, 3):  # Two traces
-        metadata = copy.deepcopy(trace_common_metadata)
-        trace_id = f"litellm-test-trace{trace_num}-{run_id}"
-        metadata["trace_id"] = trace_id
-        metadata["trace_name"] = trace_id
-        trace_identifiers[trace_id] = []
-        print(f"Trace: {trace_id}")
-        for generation_num in range(
-            1, trace_num + 1
-        ):  # Each trace has a number of generations equal to its trace number
-            metadata["trace_user_id"] = f"litellm-test-user{generation_num}-{run_id}"
-            generation_id = (
-                f"litellm-test-trace{trace_num}-generation-{generation_num}-{run_id}"
-            )
-            metadata["generation_id"] = generation_id
-            metadata["generation_name"] = generation_id
-            metadata["trace_metadata"][
-                "generation_id"
-            ] = generation_id  # Update to test if trace_metadata is overwritten by update trace keys
-            trace_identifiers[trace_id].append(generation_id)
-            print(f"Generation: {generation_id}")
-            response = await create_async_task(
-                model="gpt-3.5-turbo",
-                mock_response=f"{session_id}:{trace_id}:{generation_id}",
-                messages=[
-                    {
-                        "role": "user",
-                        "content": f"{session_id}:{trace_id}:{generation_id}",
-                    }
-                ],
-                max_tokens=100,
-                temperature=0.2,
-                metadata=copy.deepcopy(
-                    metadata
-                ),  # Every generation needs its own metadata, langfuse is not async/thread safe without it
-            )
-            print(response)
-            metadata["existing_trace_id"] = trace_id
-
-    langfuse_client.flush()
-    await asyncio.sleep(10)
-
-    # Tests the metadata filtering and the override of the output to be the last generation
-    for trace_id, generation_ids in trace_identifiers.items():
-        trace = langfuse_client.get_trace(id=trace_id)
-        assert trace.id == trace_id
-        assert trace.session_id == session_id
-        assert trace.metadata != trace_metadata
-        generations = list(
-            reversed(langfuse_client.get_generations(trace_id=trace_id).data)
-        )
-        assert len(generations) == len(generation_ids)
-        assert (
-            trace.input == generations[0].input
-        )  # Should be set by the first generation
-        assert (
-            trace.output == generations[-1].output
-        )  # Should be overwritten by the last generation according to update_trace_keys
-        assert (
-            trace.metadata != generations[-1].metadata
-        )  # Should be overwritten by the last generation according to update_trace_keys
-        assert trace.metadata["generation_id"] == generations[-1].id
-        assert set(trace.tags).issuperset(trace_common_metadata["tags"])
-        print("trace_from_langfuse", trace)
-        for generation_id, generation in zip(generation_ids, generations):
-            assert generation.id == generation_id
-            assert generation.trace_id == trace_id
-            print(
-                "common keys in trace",
-                set(generation.metadata.keys()).intersection(
-                    expected_filtered_metadata_keys
-                ),
-            )
-
-            assert set(generation.metadata.keys()).isdisjoint(
-                expected_filtered_metadata_keys
-            )
-            print("generation_from_langfuse", generation)
-
-
-@pytest.mark.skip(reason="beta test - checking langfuse output")
-def test_langfuse_logging():
-    try:
-        pre_langfuse_setup()
-        litellm.set_verbose = True
-        response = completion(
-            model="claude-instant-1.2",
-            messages=[{"role": "user", "content": "Hi 👋 - i'm claude"}],
-            max_tokens=10,
-            temperature=0.2,
-        )
-        print(response)
-        # time.sleep(5)
-        # # check langfuse.log to see if there was a failed response
-        # search_logs("langfuse.log")
-
-    except litellm.Timeout as e:
-        pass
-    except Exception as e:
-        pytest.fail(f"An exception occurred - {e}")
-
-
-# test_langfuse_logging()
-
-
-@pytest.mark.skip(reason="beta test - checking langfuse output")
-def test_langfuse_logging_stream():
-    try:
-        litellm.set_verbose = True
-        response = completion(
-            model="gpt-3.5-turbo",
-            messages=[
-                {
-                    "role": "user",
-                    "content": "this is a streaming test for llama2 + langfuse",
-                }
-            ],
-            max_tokens=20,
-            temperature=0.2,
-            stream=True,
-        )
-        print(response)
-        for chunk in response:
-            pass
-            # print(chunk)
-    except litellm.Timeout as e:
-        pass
-    except Exception as e:
-        print(e)
-
-
-# test_langfuse_logging_stream()
-
-
-@pytest.mark.skip(reason="beta test - checking langfuse output")
-def test_langfuse_logging_custom_generation_name():
-    try:
-        litellm.set_verbose = True
-        response = completion(
-            model="gpt-3.5-turbo",
-            messages=[{"role": "user", "content": "Hi 👋 - i'm claude"}],
-            max_tokens=10,
-            metadata={
-                "langfuse/foo": "bar",
-                "langsmith/fizz": "buzz",
-                "prompt_hash": "asdf98u0j9131123",
-                "generation_name": "ishaan-test-generation",
-                "generation_id": "gen-id22",
-                "trace_id": "trace-id22",
-                "trace_user_id": "user-id2",
-            },
-        )
-        print(response)
-    except litellm.Timeout as e:
-        pass
-    except Exception as e:
-        pytest.fail(f"An exception occurred - {e}")
-        print(e)
-
-
-# test_langfuse_logging_custom_generation_name()
-
-
-@pytest.mark.skip(reason="beta test - checking langfuse output")
-def test_langfuse_logging_embedding():
-    try:
-        litellm.set_verbose = True
-        litellm.success_callback = ["langfuse"]
-        response = litellm.embedding(
-            model="text-embedding-ada-002",
-            input=["gm", "ishaan"],
-        )
-        print(response)
-    except litellm.Timeout as e:
-        pass
-    except Exception as e:
-        pytest.fail(f"An exception occurred - {e}")
-        print(e)
-
-
-@pytest.mark.skip(reason="beta test - checking langfuse output")
-def test_langfuse_logging_function_calling():
-    litellm.set_verbose = True
-    function1 = [
-        {
-            "name": "get_current_weather",
-            "description": "Get the current weather in a given location",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "location": {
-                        "type": "string",
-                        "description": "The city and state, e.g. San Francisco, CA",
-                    },
-                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
-                },
-                "required": ["location"],
-            },
-        }
-    ]
-    try:
-        response = completion(
-            model="gpt-3.5-turbo",
-            messages=[{"role": "user", "content": "what's the weather in boston"}],
-            temperature=0.1,
-            functions=function1,
-        )
-        print(response)
-    except litellm.Timeout as e:
-        pass
-    except Exception as e:
-        print(e)
-
-
-# test_langfuse_logging_function_calling()
-
-
-@pytest.mark.skip(reason="Need to address this on main")
-def test_aaalangfuse_existing_trace_id():
-    """
-    When existing trace id is passed, don't set trace params -> prevents overwriting the trace
-
-    Pass 1 logging object with a trace
-
-    Pass 2nd logging object with the trace id
-
-    Assert no changes to the trace
-    """
-    # Test - if the logs were sent to the correct team on langfuse
-    import datetime
-
-    import litellm
-    from litellm.integrations.langfuse import LangFuseLogger
-
-    langfuse_Logger = LangFuseLogger(
-        langfuse_public_key=os.getenv("LANGFUSE_PROJECT2_PUBLIC"),
-        langfuse_secret=os.getenv("LANGFUSE_PROJECT2_SECRET"),
-    )
-    litellm.success_callback = ["langfuse"]
-
-    # langfuse_args = {'kwargs': { 'start_time': 'end_time': datetime.datetime(2024, 5, 1, 7, 31, 29, 903685), 'user_id': None, 'print_verbose': <function print_verbose at 0x109d1f420>, 'level': 'DEFAULT', 'status_message': None}
-    response_obj = litellm.ModelResponse(
-        id="chatcmpl-9K5HUAbVRqFrMZKXL0WoC295xhguY",
-        choices=[
-            litellm.Choices(
-                finish_reason="stop",
-                index=0,
-                message=litellm.Message(
-                    content="I'm sorry, I am an AI assistant and do not have real-time information. I recommend checking a reliable weather website or app for the most up-to-date weather information in Boston.",
-                    role="assistant",
-                ),
-            )
-        ],
-        created=1714573888,
-        model="gpt-3.5-turbo-0125",
-        object="chat.completion",
-        system_fingerprint="fp_3b956da36b",
-        usage=litellm.Usage(completion_tokens=37, prompt_tokens=14, total_tokens=51),
-    )
-
-    ### NEW TRACE ###
-    message = [{"role": "user", "content": "what's the weather in boston"}]
-    langfuse_args = {
-        "response_obj": response_obj,
-        "kwargs": {
-            "model": "gpt-3.5-turbo",
-            "litellm_params": {
-                "acompletion": False,
-                "api_key": None,
-                "force_timeout": 600,
-                "logger_fn": None,
-                "verbose": False,
-                "custom_llm_provider": "openai",
-                "api_base": "https://api.openai.com/v1/",
-                "litellm_call_id": None,
-                "model_alias_map": {},
-                "completion_call_id": None,
-                "metadata": None,
-                "model_info": None,
-                "proxy_server_request": None,
-                "preset_cache_key": None,
-                "no-log": False,
-                "stream_response": {},
-            },
-            "messages": message,
-            "optional_params": {"temperature": 0.1, "extra_body": {}},
-            "start_time": "2024-05-01 07:31:27.986164",
-            "stream": False,
-            "user": None,
-            "call_type": "completion",
-            "litellm_call_id": None,
-            "completion_start_time": "2024-05-01 07:31:29.903685",
-            "temperature": 0.1,
-            "extra_body": {},
-            "input": [{"role": "user", "content": "what's the weather in boston"}],
-            "api_key": "my-api-key",
-            "additional_args": {
-                "complete_input_dict": {
-                    "model": "gpt-3.5-turbo",
-                    "messages": [
-                        {"role": "user", "content": "what's the weather in boston"}
-                    ],
-                    "temperature": 0.1,
-                    "extra_body": {},
-                }
-            },
-            "log_event_type": "successful_api_call",
-            "end_time": "2024-05-01 07:31:29.903685",
-            "cache_hit": None,
-            "response_cost": 6.25e-05,
-        },
-        "start_time": datetime.datetime(2024, 5, 1, 7, 31, 27, 986164),
-        "end_time": datetime.datetime(2024, 5, 1, 7, 31, 29, 903685),
-        "user_id": None,
-        "print_verbose": litellm.print_verbose,
-        "level": "DEFAULT",
-        "status_message": None,
-    }
-
-    langfuse_response_object = langfuse_Logger.log_event(**langfuse_args)
-
-    import langfuse
-
-    langfuse_client = langfuse.Langfuse(
-        public_key=os.getenv("LANGFUSE_PROJECT2_PUBLIC"),
-        secret_key=os.getenv("LANGFUSE_PROJECT2_SECRET"),
-    )
-
-    trace_id = langfuse_response_object["trace_id"]
-
-    assert trace_id is not None
-
-    langfuse_client.flush()
-
-    time.sleep(2)
-
-    print(langfuse_client.get_trace(id=trace_id))
-
-    initial_langfuse_trace = langfuse_client.get_trace(id=trace_id)
-
-    ### EXISTING TRACE ###
-
-    new_metadata = {"existing_trace_id": trace_id}
-    new_messages = [{"role": "user", "content": "What do you know?"}]
-    new_response_obj = litellm.ModelResponse(
-        id="chatcmpl-9K5HUAbVRqFrMZKXL0WoC295xhguY",
-        choices=[
-            litellm.Choices(
-                finish_reason="stop",
-                index=0,
-                message=litellm.Message(
-                    content="What do I know?",
-                    role="assistant",
-                ),
-            )
-        ],
-        created=1714573888,
-        model="gpt-3.5-turbo-0125",
-        object="chat.completion",
-        system_fingerprint="fp_3b956da36b",
-        usage=litellm.Usage(completion_tokens=37, prompt_tokens=14, total_tokens=51),
-    )
-    langfuse_args = {
-        "response_obj": new_response_obj,
-        "kwargs": {
-            "model": "gpt-3.5-turbo",
-            "litellm_params": {
-                "acompletion": False,
-                "api_key": None,
-                "force_timeout": 600,
-                "logger_fn": None,
-                "verbose": False,
-                "custom_llm_provider": "openai",
-                "api_base": "https://api.openai.com/v1/",
-                "litellm_call_id": "508113a1-c6f1-48ce-a3e1-01c6cce9330e",
-                "model_alias_map": {},
-                "completion_call_id": None,
-                "metadata": new_metadata,
-                "model_info": None,
-                "proxy_server_request": None,
-                "preset_cache_key": None,
-                "no-log": False,
-                "stream_response": {},
-            },
-            "messages": new_messages,
-            "optional_params": {"temperature": 0.1, "extra_body": {}},
-            "start_time": "2024-05-01 07:31:27.986164",
-            "stream": False,
-            "user": None,
-            "call_type": "completion",
-            "litellm_call_id": "508113a1-c6f1-48ce-a3e1-01c6cce9330e",
-            "completion_start_time": "2024-05-01 07:31:29.903685",
-            "temperature": 0.1,
-            "extra_body": {},
-            "input": [{"role": "user", "content": "what's the weather in boston"}],
-            "api_key": "my-api-key",
-            "additional_args": {
-                "complete_input_dict": {
-                    "model": "gpt-3.5-turbo",
-                    "messages": [
-                        {"role": "user", "content": "what's the weather in boston"}
-                    ],
-                    "temperature": 0.1,
-                    "extra_body": {},
-                }
-            },
-            "log_event_type": "successful_api_call",
-            "end_time": "2024-05-01 07:31:29.903685",
-            "cache_hit": None,
-            "response_cost": 6.25e-05,
-        },
-        "start_time": datetime.datetime(2024, 5, 1, 7, 31, 27, 986164),
-        "end_time": datetime.datetime(2024, 5, 1, 7, 31, 29, 903685),
-        "user_id": None,
-        "print_verbose": litellm.print_verbose,
-        "level": "DEFAULT",
-        "status_message": None,
-    }
-
-    langfuse_response_object = langfuse_Logger.log_event(**langfuse_args)
-
-    new_trace_id = langfuse_response_object["trace_id"]
-
-    assert new_trace_id == trace_id
-
-    langfuse_client.flush()
-
-    time.sleep(2)
-
-    print(langfuse_client.get_trace(id=trace_id))
-
-    new_langfuse_trace = langfuse_client.get_trace(id=trace_id)
-
-    initial_langfuse_trace_dict = dict(initial_langfuse_trace)
-    initial_langfuse_trace_dict.pop("updatedAt")
-    initial_langfuse_trace_dict.pop("timestamp")
-
-    new_langfuse_trace_dict = dict(new_langfuse_trace)
-    new_langfuse_trace_dict.pop("updatedAt")
-    new_langfuse_trace_dict.pop("timestamp")
-
-    assert initial_langfuse_trace_dict == new_langfuse_trace_dict
-
-
@pytest.mark.skipif(
|
|
||||||
condition=not os.environ.get("OPENAI_API_KEY", False),
|
|
||||||
reason="Authentication missing for openai",
|
|
||||||
)
|
|
||||||
def test_langfuse_logging_tool_calling():
|
|
||||||
litellm.set_verbose = True
|
|
||||||
|
|
||||||
def get_current_weather(location, unit="fahrenheit"):
|
|
||||||
"""Get the current weather in a given location"""
|
|
||||||
if "tokyo" in location.lower():
|
|
||||||
return json.dumps(
|
|
||||||
{"location": "Tokyo", "temperature": "10", "unit": "celsius"}
|
|
||||||
)
|
|
||||||
elif "san francisco" in location.lower():
|
|
||||||
return json.dumps(
|
|
||||||
{"location": "San Francisco", "temperature": "72", "unit": "fahrenheit"}
|
|
||||||
)
|
|
||||||
elif "paris" in location.lower():
|
|
||||||
return json.dumps(
|
|
||||||
{"location": "Paris", "temperature": "22", "unit": "celsius"}
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return json.dumps({"location": location, "temperature": "unknown"})
|
|
||||||
|
|
||||||
messages = [
|
|
||||||
{
|
|
||||||
"role": "user",
|
|
||||||
"content": "What's the weather like in San Francisco, Tokyo, and Paris?",
|
|
||||||
}
|
|
||||||
]
|
|
||||||
tools = [
|
|
||||||
{
|
|
||||||
"type": "function",
|
|
||||||
"function": {
|
|
||||||
"name": "get_current_weather",
|
|
||||||
"description": "Get the current weather in a given location",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"location": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The city and state, e.g. San Francisco, CA",
|
|
||||||
},
|
|
||||||
"unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
|
|
||||||
},
|
|
||||||
"required": ["location"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
response = litellm.completion(
|
|
||||||
model="gpt-3.5-turbo-1106",
|
|
||||||
messages=messages,
|
|
||||||
tools=tools,
|
|
||||||
tool_choice="auto", # auto is default, but we'll be explicit
|
|
||||||
)
|
|
||||||
print("\nLLM Response1:\n", response)
|
|
||||||
response_message = response.choices[0].message
|
|
||||||
tool_calls = response.choices[0].message.tool_calls
|
|
||||||
|
|
||||||
|
|
||||||
# test_langfuse_logging_tool_calling()
|
|
||||||
|
|
||||||
|
|
||||||
def get_langfuse_prompt(name: str):
|
|
||||||
import langfuse
|
|
||||||
from langfuse import Langfuse
|
|
||||||
|
|
||||||
try:
|
|
||||||
langfuse = Langfuse(
|
|
||||||
public_key=os.environ["LANGFUSE_DEV_PUBLIC_KEY"],
|
|
||||||
secret_key=os.environ["LANGFUSE_DEV_SK_KEY"],
|
|
||||||
host=os.environ["LANGFUSE_HOST"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get current production version of a text prompt
|
|
||||||
prompt = langfuse.get_prompt(name=name)
|
|
||||||
return prompt
|
|
||||||
except Exception as e:
|
|
||||||
raise Exception(f"Error getting prompt: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
@pytest.mark.skip(
|
|
||||||
reason="local only test, use this to verify if we can send request to litellm proxy server"
|
|
||||||
)
|
|
||||||
async def test_make_request():
|
|
||||||
response = await litellm.acompletion(
|
|
||||||
model="openai/llama3",
|
|
||||||
api_key="sk-1234",
|
|
||||||
base_url="http://localhost:4000",
|
|
||||||
messages=[{"role": "user", "content": "Hi 👋 - i'm claude"}],
|
|
||||||
extra_body={
|
|
||||||
"metadata": {
|
|
||||||
"tags": ["openai"],
|
|
||||||
"prompt": get_langfuse_prompt("test-chat"),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
|
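For illustration only (not part of this diff): the same request that test_make_request sends through litellm.acompletion can be issued with the OpenAI SDK pointed at the proxy, with tags carried in extra_body.metadata. The base URL and virtual key below are the placeholder values used in the test above.

import openai

# Sketch: call a LiteLLM proxy with the OpenAI client; metadata.tags is
# forwarded to the proxy's logging integrations (e.g. Langfuse, spend logs).
client = openai.OpenAI(api_key="sk-1234", base_url="http://localhost:4000")

response = client.chat.completions.create(
    model="openai/llama3",
    messages=[{"role": "user", "content": "Hi 👋 - i'm claude"}],
    extra_body={"metadata": {"tags": ["openai"]}},
)
print(response.choices[0].message.content)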
@@ -2,9 +2,14 @@
 ## This tests the blocked user pre call hook for the proxy server

-import sys, os, asyncio, time, random
-from datetime import datetime
+import asyncio
+import os
+import random
+import sys
+import time
 import traceback
+from datetime import datetime

 from dotenv import load_dotenv
 from fastapi import Request

@@ -14,57 +19,53 @@ import os
 sys.path.insert(
     0, os.path.abspath("../..")
 )  # Adds the parent directory to the system path
+import asyncio
+import logging
+
 import pytest

 import litellm
+from litellm import Router, mock_completion
+from litellm._logging import verbose_proxy_logger
+from litellm.caching import DualCache
+from litellm.proxy._types import UserAPIKeyAuth
 from litellm.proxy.enterprise.enterprise_hooks.blocked_user_list import (
     _ENTERPRISE_BlockedUserList,
 )
-from litellm import Router, mock_completion
-from litellm.proxy.utils import ProxyLogging, hash_token
-from litellm.proxy._types import UserAPIKeyAuth
-from litellm.caching import DualCache
-from litellm.proxy.utils import PrismaClient, ProxyLogging, hash_token
-
-import pytest, logging, asyncio
-import litellm, asyncio
-from litellm.proxy.proxy_server import (
-    user_api_key_auth,
-    block_user,
+from litellm.proxy.management_endpoints.internal_user_endpoints import (
+    new_user,
+    user_info,
+    user_update,
 )
 from litellm.proxy.management_endpoints.key_management_endpoints import (
     delete_key_fn,
-    info_key_fn,
-    update_key_fn,
     generate_key_fn,
     generate_key_helper_fn,
+    info_key_fn,
+    update_key_fn,
 )
-from litellm.proxy.management_endpoints.internal_user_endpoints import (
-    new_user,
-    user_update,
-    user_info,
-)
-from litellm.proxy.spend_reporting_endpoints.spend_management_endpoints import (
-    spend_user_fn,
+from litellm.proxy.proxy_server import block_user, user_api_key_auth
+from litellm.proxy.spend_tracking.spend_management_endpoints import (
     spend_key_fn,
+    spend_user_fn,
     view_spend_logs,
 )
 from litellm.proxy.utils import PrismaClient, ProxyLogging, hash_token
-from litellm._logging import verbose_proxy_logger

 verbose_proxy_logger.setLevel(level=logging.DEBUG)

+from starlette.datastructures import URL
+
+from litellm.caching import DualCache
 from litellm.proxy._types import (
-    NewUserRequest,
-    GenerateKeyRequest,
-    DynamoDBArgs,
-    KeyRequest,
-    UpdateKeyRequest,
-    GenerateKeyRequest,
     BlockUsers,
+    DynamoDBArgs,
+    GenerateKeyRequest,
+    KeyRequest,
+    NewUserRequest,
+    UpdateKeyRequest,
 )
 from litellm.proxy.utils import DBClient
-from starlette.datastructures import URL
-from litellm.caching import DualCache

 proxy_logging_obj = ProxyLogging(user_api_key_cache=DualCache())

@@ -670,7 +670,7 @@ def test_litellm_predibase_exception():
 # print(f"accuracy_score: {accuracy_score}")


-@pytest.mark.parametrize("provider", ["predibase", "vertex_ai_beta"])
+@pytest.mark.parametrize("provider", ["predibase", "vertex_ai_beta", "anthropic"])
 def test_exception_mapping(provider):
     """
     For predibase, run through a set of mock exceptions

@@ -712,3 +712,27 @@ def test_exception_mapping(provider):
        )

        pass


+def test_anthropic_tool_calling_exception():
+    """
+    Related - https://github.com/BerriAI/litellm/issues/4348
+    """
+    tools = [
+        {
+            "type": "function",
+            "function": {
+                "name": "get_current_weather",
+                "description": "Get the current weather in a given location",
+                "parameters": {},
+            },
+        }
+    ]
+    try:
+        litellm.completion(
+            model="claude-3-5-sonnet-20240620",
+            messages=[{"role": "user", "content": "Hey, how's it going?"}],
+            tools=tools,
+        )
+    except litellm.BadRequestError:
+        pass
@@ -75,7 +75,7 @@ from litellm.proxy.proxy_server import (
     new_end_user,
     user_api_key_auth,
 )
-from litellm.proxy.spend_reporting_endpoints.spend_management_endpoints import (
+from litellm.proxy.spend_tracking.spend_management_endpoints import (
     spend_key_fn,
     spend_user_fn,
     view_spend_logs,
litellm/tests/test_proxy_routes.py (new file, 52 lines added)
@@ -0,0 +1,52 @@
+import os
+import sys
+
+from dotenv import load_dotenv
+
+load_dotenv()
+import io
+import os
+
+# this file is to test litellm/proxy
+
+sys.path.insert(
+    0, os.path.abspath("../..")
+)  # Adds the parent directory to the system path
+import asyncio
+import logging
+
+import pytest
+
+import litellm
+from litellm.proxy._types import LiteLLMRoutes
+from litellm.proxy.proxy_server import router
+
+# Configure logging
+logging.basicConfig(
+    level=logging.DEBUG,  # Set the desired logging level
+    format="%(asctime)s - %(levelname)s - %(message)s",
+)
+
+
+def test_routes_on_litellm_proxy():
+    """
+    Goal of this test: Test that we have all the critical OpenAI Routes on the Proxy server Fast API router
+
+
+    this prevents accidentally deleting /threads, or /batches etc
+    """
+    _all_routes = []
+    for route in router.routes:
+
+        _path_as_str = str(route.path)
+        if ":path" in _path_as_str:
+            # remove the :path
+            _path_as_str = _path_as_str.replace(":path", "")
+        _all_routes.append(_path_as_str)
+
+    print("ALL ROUTES on LiteLLM Proxy:", _all_routes)
+    print("\n\n")
+    print("ALL OPENAI ROUTES:", LiteLLMRoutes.openai_routes.value)
+
+    for route in LiteLLMRoutes.openai_routes.value:
+        assert route in _all_routes
@@ -1,26 +1,32 @@
-import sys, os
-import traceback, uuid
+import os
+import sys
+import traceback
+import uuid

 from dotenv import load_dotenv
 from fastapi import Request
 from fastapi.routing import APIRoute

 load_dotenv()
-import os, io, time
+import io
+import os
+import time

 # this file is to test litellm/proxy

 sys.path.insert(
     0, os.path.abspath("../..")
 )  # Adds the parent directory to the system path
-import pytest, logging, asyncio
-import litellm, asyncio
-import json
+import asyncio
 import datetime
-from litellm.proxy.utils import (
-    get_logging_payload,
-    SpendLogsPayload,
-    SpendLogsMetadata,
-)  # noqa: E402
+import json
+import logging
+import pytest
+import litellm
+from litellm.proxy.spend_tracking.spend_tracking_utils import get_logging_payload
+from litellm.proxy.utils import SpendLogsMetadata, SpendLogsPayload  # noqa: E402


 def test_spend_logs_payload():

@@ -53,6 +59,7 @@ def test_spend_logs_payload():
             "model_alias_map": {},
             "completion_call_id": None,
             "metadata": {
+                "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"],
                 "user_api_key": "88dc28d0f030c55ed4ab77ed8faf098196cb1c05df778539800c9f1243fe6b4b",
                 "user_api_key_alias": None,
                 "user_api_end_user_max_budget": None,

@@ -193,3 +200,8 @@ def test_spend_logs_payload():
     assert isinstance(payload["metadata"], str)
     payload["metadata"] = json.loads(payload["metadata"])
     assert set(payload["metadata"].keys()) == set(expected_metadata_keys)
+
+    # This is crucial - used in PROD, it should pass, related issue: https://github.com/BerriAI/litellm/issues/4334
+    assert (
+        payload["request_tags"] == '["model-anthropic-claude-v2.1", "app-ishaan-prod"]'
+    )
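For context (illustrative, not part of the diff): the new request_tags assertion reflects tags a caller attaches via request metadata; the spend-log payload stores them as a JSON-encoded list. A minimal sketch of the caller side, using a mocked response so no provider call is made:

import litellm

# Sketch: tags passed in `metadata` end up in the spend-log payload's
# "request_tags" field as a JSON string (see the assertion above).
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hello"}],
    mock_response="hi!",
    metadata={"tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]},
)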
@@ -394,7 +394,7 @@ async def test_router_completion_streaming():
                 "model": "azure/gpt-turbo",
                 "api_key": "os.environ/AZURE_FRANCE_API_KEY",
                 "api_base": "https://openai-france-1234.openai.azure.com",
-                "rpm": 1440,
+                "tpm": 1440,
                 "mock_response": "Hello world",
             },
             "model_info": {"id": 1},

@@ -405,7 +405,7 @@ async def test_router_completion_streaming():
                 "model": "azure/gpt-35-turbo",
                 "api_key": "os.environ/AZURE_EUROPE_API_KEY",
                 "api_base": "https://my-endpoint-europe-berri-992.openai.azure.com",
-                "rpm": 6,
+                "tpm": 6,
                 "mock_response": "Hello world",
             },
             "model_info": {"id": 2},
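The two hunks above switch these router test deployments from an rpm limit to a tpm limit. As an illustrative sketch (not part of the diff), a deployment entry carrying a tpm value looks like this when constructing a Router directly; the key, URL, and limit are placeholders taken from the test:

from litellm import Router

# Sketch: a deployment whose litellm_params carry a tokens-per-minute (tpm) limit.
router = Router(
    model_list=[
        {
            "model_name": "gpt-3.5-turbo",
            "litellm_params": {
                "model": "azure/gpt-35-turbo",
                "api_key": "os.environ/AZURE_EUROPE_API_KEY",
                "api_base": "https://my-endpoint-europe-berri-992.openai.azure.com",
                "tpm": 6,
                "mock_response": "Hello world",
            },
            "model_info": {"id": 2},
        }
    ]
)

response = router.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hello"}],
)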
@@ -2,9 +2,14 @@
 ## This tests the batch update spend logic on the proxy server

-import sys, os, asyncio, time, random
-from datetime import datetime
+import asyncio
+import os
+import random
+import sys
+import time
 import traceback
+from datetime import datetime

 from dotenv import load_dotenv
 from fastapi import Request

@@ -14,54 +19,50 @@ import os
 sys.path.insert(
     0, os.path.abspath("../..")
 )  # Adds the parent directory to the system path
+import asyncio
+import logging
+
 import pytest

 import litellm
 from litellm import Router, mock_completion
-from litellm.proxy.utils import ProxyLogging
-from litellm.proxy._types import UserAPIKeyAuth
+from litellm._logging import verbose_proxy_logger
 from litellm.caching import DualCache
-from litellm.proxy.utils import PrismaClient, ProxyLogging, hash_token
+from litellm.proxy._types import UserAPIKeyAuth

-import pytest, logging, asyncio
-import litellm, asyncio
-from litellm.proxy.proxy_server import (
-    user_api_key_auth,
-    block_user,
-)
-from litellm.proxy.spend_reporting_endpoints.spend_management_endpoints import (
-    spend_user_fn,
-    spend_key_fn,
-    view_spend_logs,
-)
 from litellm.proxy.management_endpoints.internal_user_endpoints import (
     new_user,
-    user_update,
     user_info,
+    user_update,
 )
 from litellm.proxy.management_endpoints.key_management_endpoints import (
     delete_key_fn,
-    info_key_fn,
-    update_key_fn,
     generate_key_fn,
     generate_key_helper_fn,
+    info_key_fn,
+    update_key_fn,
+)
+from litellm.proxy.proxy_server import block_user, user_api_key_auth
+from litellm.proxy.spend_tracking.spend_management_endpoints import (
+    spend_key_fn,
+    spend_user_fn,
+    view_spend_logs,
 )
 from litellm.proxy.utils import PrismaClient, ProxyLogging, hash_token, update_spend
-from litellm._logging import verbose_proxy_logger

 verbose_proxy_logger.setLevel(level=logging.DEBUG)

+from starlette.datastructures import URL
+
+from litellm.caching import DualCache
 from litellm.proxy._types import (
-    NewUserRequest,
-    GenerateKeyRequest,
-    DynamoDBArgs,
-    KeyRequest,
-    UpdateKeyRequest,
-    GenerateKeyRequest,
     BlockUsers,
+    DynamoDBArgs,
+    GenerateKeyRequest,
+    KeyRequest,
+    NewUserRequest,
+    UpdateKeyRequest,
 )
 from litellm.proxy.utils import DBClient
-from starlette.datastructures import URL
-from litellm.caching import DualCache

 proxy_logging_obj = ProxyLogging(user_api_key_cache=DualCache())

@@ -1,5 +1,6 @@
 import copy
 import sys
+import time
 from datetime import datetime
 from unittest import mock

@@ -548,3 +549,63 @@ def test_get_llm_provider_ft_models():

     model, custom_llm_provider, _, _ = get_llm_provider(model="ft:gpt-4o-2024-05-13")
     assert custom_llm_provider == "openai"
+
+
+@pytest.mark.parametrize("langfuse_trace_id", [None, "my-unique-trace-id"])
+@pytest.mark.parametrize(
+    "langfuse_existing_trace_id", [None, "my-unique-existing-trace-id"]
+)
+def test_logging_trace_id(langfuse_trace_id, langfuse_existing_trace_id):
+    """
+    - Unit test for `_get_trace_id` function in Logging obj
+    """
+    from litellm.litellm_core_utils.litellm_logging import Logging
+
+    litellm.success_callback = ["langfuse"]
+    litellm_call_id = "my-unique-call-id"
+    litellm_logging_obj = Logging(
+        model="gpt-3.5-turbo",
+        messages=[{"role": "user", "content": "hi"}],
+        stream=False,
+        call_type="acompletion",
+        litellm_call_id=litellm_call_id,
+        start_time=datetime.now(),
+        function_id="1234",
+    )
+
+    metadata = {}
+
+    if langfuse_trace_id is not None:
+        metadata["trace_id"] = langfuse_trace_id
+    if langfuse_existing_trace_id is not None:
+        metadata["existing_trace_id"] = langfuse_existing_trace_id
+
+    litellm.completion(
+        model="gpt-3.5-turbo",
+        messages=[{"role": "user", "content": "Hey how's it going?"}],
+        mock_response="Hey!",
+        litellm_logging_obj=litellm_logging_obj,
+        metadata=metadata,
+    )
+
+    time.sleep(3)
+    assert litellm_logging_obj._get_trace_id(service_name="langfuse") is not None
+
+    ## if existing_trace_id exists
+    if langfuse_existing_trace_id is not None:
+        assert (
+            litellm_logging_obj._get_trace_id(service_name="langfuse")
+            == langfuse_existing_trace_id
+        )
+    ## if trace_id exists
+    elif langfuse_trace_id is not None:
+        assert (
+            litellm_logging_obj._get_trace_id(service_name="langfuse")
+            == langfuse_trace_id
+        )
+    ## else fall back to litellm_call_id
+    else:
+        assert (
+            litellm_logging_obj._get_trace_id(service_name="langfuse")
+            == litellm_call_id
+        )
@@ -5927,21 +5927,28 @@ def exception_type(
            if "prompt is too long" in error_str or "prompt: length" in error_str:
                exception_mapping_worked = True
                raise ContextWindowExceededError(
-                    message=error_str,
+                    message="AnthropicError - {}".format(error_str),
                    model=model,
                    llm_provider="anthropic",
                )
            if "Invalid API Key" in error_str:
                exception_mapping_worked = True
                raise AuthenticationError(
-                    message=error_str,
+                    message="AnthropicError - {}".format(error_str),
                    model=model,
                    llm_provider="anthropic",
                )
            if "content filtering policy" in error_str:
                exception_mapping_worked = True
                raise ContentPolicyViolationError(
-                    message=error_str,
+                    message="AnthropicError - {}".format(error_str),
+                    model=model,
+                    llm_provider="anthropic",
+                )
+            if "Client error '400 Bad Request'" in error_str:
+                exception_mapping_worked = True
+                raise BadRequestError(
+                    message="AnthropicError - {}".format(error_str),
                    model=model,
                    llm_provider="anthropic",
                )

@@ -5953,7 +5960,6 @@ def exception_type(
                    message=f"AnthropicException - {error_str}",
                    llm_provider="anthropic",
                    model=model,
-                    response=original_exception.response,
                )
            elif (
                original_exception.status_code == 400

@@ -5964,7 +5970,13 @@ def exception_type(
                    message=f"AnthropicException - {error_str}",
                    model=model,
                    llm_provider="anthropic",
-                    response=original_exception.response,
+                )
+            elif original_exception.status_code == 404:
+                exception_mapping_worked = True
+                raise NotFoundError(
+                    message=f"AnthropicException - {error_str}",
+                    model=model,
+                    llm_provider="anthropic",
                )
            elif original_exception.status_code == 408:
                exception_mapping_worked = True

@@ -5979,16 +5991,20 @@ def exception_type(
                    message=f"AnthropicException - {error_str}",
                    llm_provider="anthropic",
                    model=model,
-                    response=original_exception.response,
                )
            elif original_exception.status_code == 500:
                exception_mapping_worked = True
-                raise APIError(
-                    status_code=500,
-                    message=f"AnthropicException - {error_str}. Handle with `litellm.APIError`.",
+                raise litellm.InternalServerError(
+                    message=f"AnthropicException - {error_str}. Handle with `litellm.InternalServerError`.",
+                    llm_provider="anthropic",
+                    model=model,
+                )
+            elif original_exception.status_code == 503:
+                exception_mapping_worked = True
+                raise litellm.ServiceUnavailableError(
+                    message=f"AnthropicException - {error_str}. Handle with `litellm.ServiceUnavailableError`.",
                    llm_provider="anthropic",
                    model=model,
-                    request=original_exception.request,
                )
            elif custom_llm_provider == "replicate":
                if "Incorrect authentication token" in error_str:
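With the mapping above, Anthropic 500s surface as litellm.InternalServerError, 503s as litellm.ServiceUnavailableError, and missing models as NotFoundError, instead of a generic APIError. An illustrative sketch, not part of the diff, of catching the mapped types on the caller side (the model name is just an example):

import litellm

try:
    litellm.completion(
        model="claude-3-5-sonnet-20240620",
        messages=[{"role": "user", "content": "Hey, how's it going?"}],
    )
except litellm.BadRequestError as e:
    # e.g. invalid tool definitions (see test_anthropic_tool_calling_exception)
    print("bad request:", e)
except litellm.InternalServerError as e:
    # Anthropic returned a 500; usually worth retrying
    print("internal server error:", e)
except litellm.ServiceUnavailableError as e:
    # Anthropic returned a 503 (overloaded)
    print("service unavailable:", e)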
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "1.40.21"
+version = "1.40.24"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT"

@@ -90,7 +90,7 @@ requires = ["poetry-core", "wheel"]
 build-backend = "poetry.core.masonry.api"

 [tool.commitizen]
-version = "1.40.21"
+version = "1.40.24"
 version_files = [
     "pyproject.toml:^version"
 ]
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -1,7 +1,7 @@
|
||||||
2:I[77831,[],""]
|
2:I[77831,[],""]
|
||||||
3:I[87494,["294","static/chunks/294-0e35509d5ca95267.js","131","static/chunks/131-6a03368053f9d26d.js","777","static/chunks/777-71fb78fdb4897cc3.js","418","static/chunks/app/model_hub/page-4cb65c32467214b5.js"],""]
|
3:I[87494,["294","static/chunks/294-0e35509d5ca95267.js","131","static/chunks/131-6a03368053f9d26d.js","777","static/chunks/777-f76791513e294b30.js","418","static/chunks/app/model_hub/page-ba7819b59161aa64.js"],""]
|
||||||
4:I[5613,[],""]
|
4:I[5613,[],""]
|
||||||
5:I[31778,[],""]
|
5:I[31778,[],""]
|
||||||
0:["S9_6IC27HNWjJtr-LNaAO",[[["",{"children":["model_hub",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["model_hub",{"children":["__PAGE__",{},["$L1",["$","$L2",null,{"propsForComponent":{"params":{}},"Component":"$3","isStaticGeneration":true}],null]]},["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","model_hub","children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined","styles":null}]]},[null,["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_12bbc4","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[],"styles":null}]}]}],null]],[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/b65d5698d1a1958d.css","precedence":"next","crossOrigin":""}]],"$L6"]]]]
0:["DahySukItzAH9ZoOiMmQB",[[["",{"children":["model_hub",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["model_hub",{"children":["__PAGE__",{},["$L1",["$","$L2",null,{"propsForComponent":{"params":{}},"Component":"$3","isStaticGeneration":true}],null]]},["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","model_hub","children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined","styles":null}]]},[null,["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_12bbc4","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[],"styles":null}]}]}],null]],[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/0f6908625573deae.css","precedence":"next","crossOrigin":""}]],"$L6"]]]]
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
1:null

File diff suppressed because one or more lines are too long

@@ -1,7 +1,7 @@
2:I[77831,[],""]
3:I[667,["665","static/chunks/3014691f-589a5f4865c3822f.js","294","static/chunks/294-0e35509d5ca95267.js","684","static/chunks/684-bb2d2f93d92acb0b.js","777","static/chunks/777-71fb78fdb4897cc3.js","461","static/chunks/app/onboarding/page-664c7288e11fff5a.js"],""]
3:I[667,["665","static/chunks/3014691f-589a5f4865c3822f.js","294","static/chunks/294-0e35509d5ca95267.js","684","static/chunks/684-bb2d2f93d92acb0b.js","777","static/chunks/777-f76791513e294b30.js","461","static/chunks/app/onboarding/page-fd30ae439831db99.js"],""]
4:I[5613,[],""]
5:I[31778,[],""]
0:["S9_6IC27HNWjJtr-LNaAO",[[["",{"children":["onboarding",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["onboarding",{"children":["__PAGE__",{},["$L1",["$","$L2",null,{"propsForComponent":{"params":{}},"Component":"$3","isStaticGeneration":true}],null]]},["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","onboarding","children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined","styles":null}]]},[null,["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_12bbc4","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[],"styles":null}]}]}],null]],[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/b65d5698d1a1958d.css","precedence":"next","crossOrigin":""}]],"$L6"]]]]
0:["DahySukItzAH9ZoOiMmQB",[[["",{"children":["onboarding",{"children":["__PAGE__",{}]}]},"$undefined","$undefined",true],["",{"children":["onboarding",{"children":["__PAGE__",{},["$L1",["$","$L2",null,{"propsForComponent":{"params":{}},"Component":"$3","isStaticGeneration":true}],null]]},["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children","onboarding","children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":"$undefined","notFoundStyles":"$undefined","styles":null}]]},[null,["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__className_12bbc4","children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"loading":"$undefined","loadingStyles":"$undefined","loadingScripts":"$undefined","hasLoading":false,"error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L5",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":"404"}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],"notFoundStyles":[],"styles":null}]}]}],null]],[[["$","link","0",{"rel":"stylesheet","href":"/ui/_next/static/css/0f6908625573deae.css","precedence":"next","crossOrigin":""}]],"$L6"]]]]
6:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"LiteLLM Dashboard"}],["$","meta","3",{"name":"description","content":"LiteLLM Proxy Admin UI"}],["$","link","4",{"rel":"icon","href":"/ui/favicon.ico","type":"image/x-icon","sizes":"16x16"}],["$","meta","5",{"name":"next-size-adjust"}]]
1:null

@@ -20,10 +20,19 @@ import {
 } from "@/components/networking";
 import { jwtDecode } from "jwt-decode";
 import { Form, Button as Button2, message } from "antd";
+
+function getCookie(name: string) {
+  console.log("COOKIES", document.cookie)
+  const cookieValue = document.cookie
+    .split('; ')
+    .find(row => row.startsWith(name + '='));
+  return cookieValue ? cookieValue.split('=')[1] : null;
+}
+
 export default function Onboarding() {
   const [form] = Form.useForm();
   const searchParams = useSearchParams();
-  const token = searchParams.get("token");
+  const token = getCookie('token');
   const inviteID = searchParams.get("id");
   const [accessToken, setAccessToken] = useState<string | null>(null);
   const [defaultUserEmail, setDefaultUserEmail] = useState<string>("");
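
The hunk above replaces the `?token=` query-parameter lookup with a cookie read. A minimal standalone sketch of that lookup, assuming it runs in a client component where `document.cookie` is available (the helper name and the `token` cookie come from the diff; the sample cookie string is made up):

```ts
// Reads a cookie value by name from document.cookie.
// e.g. with document.cookie === "lang=en; token=eyJhbGci..." (made-up values),
// getCookie("token") returns "eyJhbGci...".
function getCookie(name: string): string | null {
  const cookieValue = document.cookie
    .split("; ")                                 // split into "name=value" pairs
    .find((row) => row.startsWith(name + "=")); // first pair with a matching name
  return cookieValue ? cookieValue.split("=")[1] : null;
}

const token = getCookie("token"); // previously: searchParams.get("token")
```
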
@@ -19,6 +19,15 @@ import CacheDashboard from "@/components/cache_dashboard";
 import { jwtDecode } from "jwt-decode";
 import { Typography } from "antd";
+
+function getCookie(name: string) {
+  console.log("COOKIES", document.cookie)
+  const cookieValue = document.cookie
+    .split('; ')
+    .find(row => row.startsWith(name + '='));
+  return cookieValue ? cookieValue.split('=')[1] : null;
+}
+
 
 function formatUserRole(userRole: string) {
   if (!userRole) {
     return "Undefined Role";
@@ -68,7 +77,7 @@ const CreateKeyPage = () => {
   const searchParams = useSearchParams();
   const [modelData, setModelData] = useState<any>({ data: [] });
   const userID = searchParams.get("userID");
-  const token = searchParams.get("token");
+  const token = getCookie('token');
 
   const [page, setPage] = useState("api-keys");
   const [accessToken, setAccessToken] = useState<string | null>(null);
@@ -24,6 +24,14 @@ type UserSpendData = {
   max_budget?: number | null;
 };
 
+function getCookie(name: string) {
+  console.log("COOKIES", document.cookie)
+  const cookieValue = document.cookie
+    .split('; ')
+    .find(row => row.startsWith(name + '='));
+  return cookieValue ? cookieValue.split('=')[1] : null;
+}
+
 interface UserDashboardProps {
   userID: string | null;
   userRole: string | null;
|
@ -66,7 +74,8 @@ const UserDashboard: React.FC<UserDashboardProps> = ({
|
||||||
const viewSpend = searchParams.get("viewSpend");
|
const viewSpend = searchParams.get("viewSpend");
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
|
|
||||||
const token = searchParams.get("token");
|
const token = getCookie('token');
|
||||||
|
|
||||||
const [accessToken, setAccessToken] = useState<string | null>(null);
|
const [accessToken, setAccessToken] = useState<string | null>(null);
|
||||||
const [teamSpend, setTeamSpend] = useState<number | null>(null);
|
const [teamSpend, setTeamSpend] = useState<number | null>(null);
|
||||||
const [userModels, setUserModels] = useState<string[]>([]);
|
const [userModels, setUserModels] = useState<string[]>([]);
|
||||||
|
|
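
All three dashboard pages above now read the `token` cookie instead of the `?token=` query parameter. The surrounding context lines show `jwtDecode` imports and an `accessToken` state, so a plausible end-to-end flow looks like the hook below; the decoded payload field (shown as `key`) is an assumption for illustration and is not taken from this diff:

```ts
import { jwtDecode } from "jwt-decode";
import { useEffect, useState } from "react";

// Hypothetical payload shape: the real claim names are not shown in this diff.
type ProxyToken = { key?: string };

// Cookie lookup as introduced in the diff above.
function getCookie(name: string): string | null {
  const cookieValue = document.cookie
    .split("; ")
    .find((row) => row.startsWith(name + "="));
  return cookieValue ? cookieValue.split("=")[1] : null;
}

// Sketch of how a page could turn the cookie into an access token.
function useAccessToken(): string | null {
  const [accessToken, setAccessToken] = useState<string | null>(null);
  useEffect(() => {
    const token = getCookie("token");
    if (token) {
      const decoded = jwtDecode<ProxyToken>(token); // decodes client-side, does not verify
      if (decoded.key) {
        setAccessToken(decoded.key);
      }
    }
  }, []);
  return accessToken;
}
```
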