docs: add deprecation notice on original /v1/messages endpoint + add better swagger tags on pass-through endpoints

This commit is contained in:
Krrish Dholakia 2024-11-25 19:40:54 +05:30
parent 177acd1c93
commit b55c829561
5 changed files with 68 additions and 17 deletions

View file

@@ -12,10 +12,10 @@ model_list:
vertex_ai_project: "adroit-crow-413218"
vertex_ai_location: "us-east5"
litellm_settings:
key_generation_settings:
team_key_generation:
allowed_team_member_roles: ["admin"]
required_params: ["tags"] # require team admins to set tags for cost-tracking when generating a team key
personal_key_generation: # maps to 'Default Team' on UI
allowed_user_roles: ["proxy_admin"]
# litellm_settings:
# key_generation_settings:
# team_key_generation:
# allowed_team_member_roles: ["admin"]
# required_params: ["tags"] # require team admins to set tags for cost-tracking when generating a team key
# personal_key_generation: # maps to 'Default Team' on UI
# allowed_user_roles: ["proxy_admin"]

View file

@@ -54,12 +54,19 @@ def create_request_copy(request: Request):
}
@router.api_route("/gemini/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"])
@router.api_route(
"/gemini/{endpoint:path}",
methods=["GET", "POST", "PUT", "DELETE"],
tags=["Google AI Studio Pass-through", "pass-through"],
)
async def gemini_proxy_route(
endpoint: str,
request: Request,
fastapi_response: Response,
):
"""
[Docs](https://docs.litellm.ai/docs/pass_through/google_ai_studio)
"""
## CHECK FOR LITELLM API KEY IN THE QUERY PARAMS - ?key=LITELLM_API_KEY
api_key = request.query_params.get("key")
@@ -111,13 +118,20 @@ async def gemini_proxy_route(
return received_value
@router.api_route("/cohere/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"])
@router.api_route(
"/cohere/{endpoint:path}",
methods=["GET", "POST", "PUT", "DELETE"],
tags=["Cohere Pass-through", "pass-through"],
)
async def cohere_proxy_route(
endpoint: str,
request: Request,
fastapi_response: Response,
user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
):
"""
[Docs](https://docs.litellm.ai/docs/pass_through/cohere)
"""
base_target_url = "https://api.cohere.com"
encoded_endpoint = httpx.URL(endpoint).path
@@ -154,7 +168,9 @@ async def cohere_proxy_route(
@router.api_route(
"/anthropic/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"]
"/anthropic/{endpoint:path}",
methods=["GET", "POST", "PUT", "DELETE"],
tags=["Anthropic Pass-through", "pass-through"],
)
async def anthropic_proxy_route(
endpoint: str,
@@ -162,6 +178,9 @@ async def anthropic_proxy_route(
fastapi_response: Response,
user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
):
"""
[Docs](https://docs.litellm.ai/docs/anthropic_completion)
"""
base_target_url = "https://api.anthropic.com"
encoded_endpoint = httpx.URL(endpoint).path
@@ -201,13 +220,20 @@ async def anthropic_proxy_route(
return received_value
@router.api_route("/bedrock/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"])
@router.api_route(
"/bedrock/{endpoint:path}",
methods=["GET", "POST", "PUT", "DELETE"],
tags=["Bedrock Pass-through", "pass-through"],
)
async def bedrock_proxy_route(
endpoint: str,
request: Request,
fastapi_response: Response,
user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
):
"""
[Docs](https://docs.litellm.ai/docs/pass_through/bedrock)
"""
create_request_copy(request)
try:
@@ -275,13 +301,22 @@ async def bedrock_proxy_route(
return received_value
@router.api_route("/azure/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"])
@router.api_route(
"/azure/{endpoint:path}",
methods=["GET", "POST", "PUT", "DELETE"],
tags=["Azure Pass-through", "pass-through"],
)
async def azure_proxy_route(
endpoint: str,
request: Request,
fastapi_response: Response,
user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
):
"""
Call any azure endpoint using the proxy.
Just use `{PROXY_BASE_URL}/azure/{endpoint:path}`
"""
base_target_url = get_secret_str(secret_name="AZURE_API_BASE")
if base_target_url is None:
raise Exception(

View file

@@ -5663,11 +5663,11 @@ async def anthropic_response( # noqa: PLR0915
user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
):
"""
This is a BETA endpoint that calls 100+ LLMs in the anthropic format.
🚨 DEPRECATED ENDPOINT 🚨
To do a simple pass-through for anthropic, do `{PROXY_BASE_URL}/anthropic/v1/messages`
Use `{PROXY_BASE_URL}/anthropic/v1/messages` instead - [Docs](https://docs.litellm.ai/docs/anthropic_completion).
Docs - https://docs.litellm.ai/docs/anthropic_completion
This was a BETA endpoint that calls 100+ LLMs in the anthropic format.
"""
from litellm import adapter_completion
from litellm.adapters.anthropic_adapter import anthropic_adapter

View file

@@ -58,12 +58,21 @@ def create_request_copy(request: Request):
}
@router.api_route("/langfuse/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"])
@router.api_route(
"/langfuse/{endpoint:path}",
methods=["GET", "POST", "PUT", "DELETE"],
tags=["Langfuse Pass-through", "pass-through"],
)
async def langfuse_proxy_route(
endpoint: str,
request: Request,
fastapi_response: Response,
):
"""
Call Langfuse via LiteLLM proxy. Works with Langfuse SDK.
[Docs](https://docs.litellm.ai/docs/pass_through/langfuse)
"""
## CHECK FOR LITELLM API KEY IN THE AUTHORIZATION HEADER (Bearer token)
api_key = request.headers.get("Authorization") or ""

View file

@@ -113,13 +113,20 @@ def construct_target_url(
@router.api_route(
"/vertex-ai/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"]
"/vertex-ai/{endpoint:path}",
methods=["GET", "POST", "PUT", "DELETE"],
tags=["Vertex AI Pass-through", "pass-through"],
)
async def vertex_proxy_route(
endpoint: str,
request: Request,
fastapi_response: Response,
):
"""
Call LiteLLM proxy via Vertex AI SDK.
[Docs](https://docs.litellm.ai/docs/pass_through/vertex_ai)
"""
encoded_endpoint = httpx.URL(endpoint).path
import re