Merge branch 'main' into litellm_bedrock_anthropic_fix

This commit is contained in:
Krish Dholakia 2024-05-17 17:47:32 -07:00 committed by GitHub
commit 3a06fe2818
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 155 additions and 14 deletions

View file

@@ -110,6 +110,7 @@ from litellm.router import LiteLLM_Params, Deployment, updateDeployment
from litellm.router import ModelInfo as RouterModelInfo
from litellm._logging import verbose_router_logger, verbose_proxy_logger
from litellm.proxy.auth.handle_jwt import JWTHandler
from litellm.proxy.auth.litellm_license import LicenseCheck
from litellm.proxy.hooks.prompt_injection_detection import (
_OPTIONAL_PromptInjectionDetection,
)
@@ -150,6 +151,7 @@ from fastapi.responses import (
ORJSONResponse,
JSONResponse,
)
from fastapi.openapi.utils import get_openapi
from fastapi.responses import RedirectResponse
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
@@ -169,17 +171,30 @@ except Exception as e:
except Exception as e:
pass
# License check gates the enterprise-only branding overrides below.
_license_check = LicenseCheck()
premium_user: bool = _license_check.is_premium()

ui_link = f"/ui/"
ui_message = (
    f"👉 [```LiteLLM Admin Panel on /ui```]({ui_link}). Create, Edit Keys with SSO"
)

### CUSTOM BRANDING [ENTERPRISE FEATURE] ###
# Swagger docs are served at "/" unless NO_DOCS is set to the string "True".
_docs_url = None if os.getenv("NO_DOCS", "False") == "True" else "/"
# DOCS_TITLE / DOCS_DESCRIPTION env overrides are honored only for premium
# (licensed) users; everyone else gets the stock title and description.
_title = os.getenv("DOCS_TITLE", "LiteLLM API") if premium_user else "LiteLLM API"
_description = (
    os.getenv(
        "DOCS_DESCRIPTION",
        f"Proxy Server to call 100+ LLMs in the OpenAI format\n\n{ui_message}",
    )
    if premium_user
    else f"Proxy Server to call 100+ LLMs in the OpenAI format\n\n{ui_message}"
)
# NOTE(review): this span is unified-diff residue from the rendered commit
# page — both the removed keyword lines (title="LiteLLM API", the inline
# description f-string) and their replacements (title=_title,
# description=_description) are present, so as written the call has duplicate
# keyword arguments, and a stray hunk header interrupts it. The reconstructed
# post-merge intent is:
#   app = FastAPI(
#       docs_url=_docs_url,
#       title=_title,
#       description=_description,
#       version=version,
#       root_path=os.environ.get("SERVER_ROOT_PATH", ""),
#   )
# Left byte-identical here; fix against the real source file, not this page.
app = FastAPI(
    docs_url=_docs_url,
    title="LiteLLM API",
    description=f"Proxy Server to call 100+ LLMs in the OpenAI format\n\n{ui_message}",
    title=_title,
    description=_description,
    version=version,
    root_path=os.environ.get(
        "SERVER_ROOT_PATH", ""
@ -187,6 +202,31 @@ app = FastAPI(
)
### CUSTOM API DOCS [ENTERPRISE FEATURE] ###
# Custom OpenAPI schema generator to include only selected routes
def custom_openapi():
    """Return an OpenAPI schema restricted to the OpenAI-compatible routes.

    Generates the schema once via FastAPI's ``get_openapi`` helper, drops
    every path not listed in ``LiteLLMRoutes.openai_routes``, and caches the
    result on ``app.openapi_schema`` so the filtering runs at most once per
    process.

    Returns:
        dict: the (possibly cached) filtered OpenAPI schema.
    """
    # Cached from a previous call — reuse it.
    if app.openapi_schema:
        return app.openapi_schema
    openapi_schema = get_openapi(
        title=app.title,
        version=app.version,
        description=app.description,
        routes=app.routes,
    )
    # Keep only the OpenAI-compatible paths; all other registered routes are
    # hidden from the published docs.
    # NOTE(review): this raises KeyError if a route listed in
    # LiteLLMRoutes.openai_routes is not actually registered on `app` —
    # confirm the two stay in sync.
    openai_routes = LiteLLMRoutes.openai_routes.value
    openapi_schema["paths"] = {
        route: openapi_schema["paths"][route] for route in openai_routes
    }
    app.openapi_schema = openapi_schema
    return app.openapi_schema


# Filtered docs are an enterprise feature: require both the DOCS_FILTERED
# flag (string "True") and a valid premium license before installing the
# custom schema generator.
if os.getenv("DOCS_FILTERED", "False") == "True" and premium_user:
    app.openapi = custom_openapi  # type: ignore
class ProxyException(Exception):
# NOTE: DO NOT MODIFY THIS
# This is used to map exactly to OPENAI Exceptions