diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 2f58665788..c631ca3ccf 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -425,7 +425,6 @@ async def proxy_startup_event(app: FastAPI):
     import json
 
     init_verbose_loggers()
-
     ### LOAD MASTER KEY ###
     # check if master key set in environment - load from there
     master_key = get_secret("LITELLM_MASTER_KEY", None)  # type: ignore
@@ -691,12 +690,18 @@ try:
         @app.middleware("http")
         async def redirect_ui_middleware(request: Request, call_next):
             if request.url.path.startswith("/ui"):
-                new_path = request.url.path.replace("/ui", f"{server_root_path}/ui", 1)
-                return RedirectResponse(new_path)
+                new_url = str(request.url).replace("/ui", f"{server_root_path}/ui", 1)
+                return RedirectResponse(new_url)
             return await call_next(request)
 
 except Exception:
     pass
+# current_dir = os.path.dirname(os.path.abspath(__file__))
+# ui_path = os.path.join(current_dir, "_experimental", "out")
+# # Mount this test directory instead
+# app.mount("/ui", StaticFiles(directory=ui_path, html=True), name="ui")
+
+
 app.add_middleware(
     CORSMiddleware,
     allow_origins=origins,
diff --git a/litellm/utils.py b/litellm/utils.py
index 9e5e8c8cba..443bfb5afd 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -1725,7 +1725,7 @@ def _format_type(props, indent):
 
 def token_counter(
     model="",
-    custom_tokenizer: Optional[dict] = None,
+    custom_tokenizer: Optional[Union[dict, SelectTokenizerResponse]] = None,
     text: Optional[Union[str, List[str]]] = None,
     messages: Optional[List] = None,
     count_response_tokens: Optional[bool] = False,
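
Note on the redirect_ui_middleware change: building the redirect target from str(request.url) instead of request.url.path keeps the query string (and scheme/host) on the rewritten URL, so redirects under a custom server_root_path no longer drop query parameters. A minimal sketch of the difference, using Starlette's URL datastructure directly; the example URL and root path value are hypothetical, not taken from the patch:

    from starlette.datastructures import URL

    # Hypothetical request URL hitting the proxy UI behind a custom root path.
    url = URL("http://localhost:4000/ui/?userID=abc&page=keys")
    server_root_path = "/litellm"  # assumed value of server_root_path

    # Old behaviour: request.url.path drops the query string.
    old_target = url.path.replace("/ui", f"{server_root_path}/ui", 1)
    # -> "/litellm/ui/"  (userID and page are lost)

    # New behaviour: str(request.url) keeps scheme, host, and query string.
    new_target = str(url).replace("/ui", f"{server_root_path}/ui", 1)
    # -> "http://localhost:4000/litellm/ui/?userID=abc&page=keys"

    print(old_target)
    print(new_target)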