Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)

feat(endpoints.py): initial set of crud endpoints for reusable credentials on proxy

parent e518e3558b
commit e6d9aa051e

4 changed files with 122 additions and 3 deletions
@@ -200,7 +200,7 @@ WATSONX_DEFAULT_API_VERSION = "2024-03-13"
 ### COHERE EMBEDDINGS DEFAULT TYPE ###
 COHERE_DEFAULT_EMBEDDING_INPUT_TYPE: COHERE_EMBEDDING_INPUT_TYPES = "search_document"
 ### CREDENTIALS ###
-credential_list: Optional[List[CredentialItem]] = None
+credential_list: List[CredentialItem] = []
 ### GUARDRAILS ###
 llamaguard_model_name: Optional[str] = None
 openai_moderations_model_name: Optional[str] = None
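With the default changed from None to an empty list, the module-level registry litellm.credential_list can be appended to and read directly, which is what the new endpoints below rely on. A minimal sketch of that pattern, not part of the diff; the credential values are placeholders, and the CredentialItem fields are taken from the type change further down in this commit:

# Minimal sketch, not part of the diff: placeholder values for illustration only.
import litellm
from litellm.types.utils import CredentialItem

litellm.credential_list.append(
    CredentialItem(
        credential_name="shared-azure-credential",
        credential_values={"api_key": "placeholder"},
        credential_info={"description": "example"},
    )
)
print([c.credential_name for c in litellm.credential_list])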
litellm/proxy/credential_endpoints/endpoints.py (new file, 115 lines)

@@ -0,0 +1,115 @@
+"""
+CRUD endpoints for storing reusable credentials.
+"""
+
+import asyncio
+import traceback
+from typing import Optional
+
+from fastapi import APIRouter, Depends, Request, Response
+
+import litellm
+from litellm.proxy._types import UserAPIKeyAuth
+from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
+from litellm.proxy.utils import handle_exception_on_proxy
+from litellm.types.utils import CredentialItem
+
+router = APIRouter()
+
+
+@router.post(
+    "/v1/credentials",
+    dependencies=[Depends(user_api_key_auth)],
+    tags=["credential management"],
+)
+async def create_credential(
+    request: Request,
+    fastapi_response: Response,
+    credential: CredentialItem,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    try:
+        litellm.credential_list.append(credential)
+        return {"success": True, "message": "Credential created successfully"}
+    except Exception as e:
+        return handle_exception_on_proxy(e)
+
+
+@router.get(
+    "/v1/credentials",
+    dependencies=[Depends(user_api_key_auth)],
+    tags=["credential management"],
+)
+async def get_credentials(
+    request: Request,
+    fastapi_response: Response,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    try:
+        return {"success": True, "credentials": litellm.credential_list}
+    except Exception as e:
+        return handle_exception_on_proxy(e)
+
+
+@router.get(
+    "/v1/credentials/{credential_name}",
+    dependencies=[Depends(user_api_key_auth)],
+    tags=["credential management"],
+)
+async def get_credential(
+    request: Request,
+    fastapi_response: Response,
+    credential_name: str,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    try:
+        for credential in litellm.credential_list:
+            if credential.credential_name == credential_name:
+                return {"success": True, "credential": credential}
+        return {"success": False, "message": "Credential not found"}
+    except Exception as e:
+        return handle_exception_on_proxy(e)
+
+
+@router.delete(
+    "/v1/credentials/{credential_name}",
+    dependencies=[Depends(user_api_key_auth)],
+    tags=["credential management"],
+)
+async def delete_credential(
+    request: Request,
+    fastapi_response: Response,
+    credential_name: str,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    try:
+        litellm.credential_list = [
+            credential
+            for credential in litellm.credential_list
+            if credential.credential_name != credential_name
+        ]
+        return {"success": True, "message": "Credential deleted successfully"}
+    except Exception as e:
+        return handle_exception_on_proxy(e)
+
+
+@router.put(
+    "/v1/credentials/{credential_name}",
+    dependencies=[Depends(user_api_key_auth)],
+    tags=["credential management"],
+)
+async def update_credential(
+    request: Request,
+    fastapi_response: Response,
+    credential_name: str,
+    credential: CredentialItem,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    try:
+        for i, c in enumerate(litellm.credential_list):
+            if c.credential_name == credential_name:
+                litellm.credential_list[i] = credential
+                return {"success": True, "message": "Credential updated successfully"}
+        return {"success": False, "message": "Credential not found"}
+    except Exception as e:
+        return handle_exception_on_proxy(e)
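A quick sketch of how these routes could be exercised once the proxy is running. The base URL and admin key below are placeholders, not values from this commit, and the requests library is used only as an example client.

# Minimal sketch, not part of the diff. Assumes a proxy reachable at
# http://localhost:4000 and an admin key; both are placeholders.
import requests

BASE_URL = "http://localhost:4000"  # placeholder proxy address
HEADERS = {"Authorization": "Bearer sk-1234"}  # placeholder admin key

# Create a credential
requests.post(
    f"{BASE_URL}/v1/credentials",
    headers=HEADERS,
    json={
        "credential_name": "shared-azure-credential",
        "credential_values": {"api_key": "placeholder"},
        "credential_info": {"description": "example"},
    },
)

# List all credentials, then fetch one by name
print(requests.get(f"{BASE_URL}/v1/credentials", headers=HEADERS).json())
print(requests.get(f"{BASE_URL}/v1/credentials/shared-azure-credential", headers=HEADERS).json())

# Update and delete by name
requests.put(
    f"{BASE_URL}/v1/credentials/shared-azure-credential",
    headers=HEADERS,
    json={
        "credential_name": "shared-azure-credential",
        "credential_values": {"api_key": "rotated-placeholder"},
        "credential_info": {"description": "example"},
    },
)
requests.delete(f"{BASE_URL}/v1/credentials/shared-azure-credential", headers=HEADERS)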
@@ -164,6 +164,7 @@ from litellm.proxy.common_utils.openai_endpoint_utils import (
 from litellm.proxy.common_utils.proxy_state import ProxyState
 from litellm.proxy.common_utils.reset_budget_job import ResetBudgetJob
 from litellm.proxy.common_utils.swagger_utils import ERROR_RESPONSES
+from litellm.proxy.credential_endpoints.endpoints import router as credential_router
 from litellm.proxy.fine_tuning_endpoints.endpoints import router as fine_tuning_router
 from litellm.proxy.fine_tuning_endpoints.endpoints import set_fine_tuning_config
 from litellm.proxy.guardrails.guardrail_endpoints import router as guardrails_router

@@ -8595,6 +8596,7 @@ app.include_router(router)
 app.include_router(batches_router)
 app.include_router(rerank_router)
 app.include_router(fine_tuning_router)
+app.include_router(credential_router)
 app.include_router(vertex_router)
 app.include_router(llm_passthrough_router)
 app.include_router(anthropic_router)
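Registering the router is all that is needed to expose its paths on the proxy app. A minimal FastAPI sketch of the same wiring, not part of the diff; the handler body here is a stand-in, not the proxy's actual handler:

# Minimal sketch, not part of the diff: include_router is what surfaces the
# /v1/credentials paths on the application.
from fastapi import APIRouter, FastAPI

credential_router = APIRouter()


@credential_router.get("/v1/credentials", tags=["credential management"])
async def get_credentials():
    # stand-in handler for illustration only
    return {"success": True, "credentials": []}


app = FastAPI()
app.include_router(credential_router)

# The router's paths are now part of the app's route table.
assert any(getattr(r, "path", None) == "/v1/credentials" for r in app.routes)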
@@ -18,11 +18,13 @@ from openai.types.moderation import (
     CategoryScores,
 )
 from openai.types.moderation_create_response import Moderation, ModerationCreateResponse
-from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
+from pydantic import BaseModel, ConfigDict, Field, PrivateAttr, Secret
 from typing_extensions import Callable, Dict, Required, TypedDict, override
 
 import litellm
 
+SecretDict = Secret[dict]
+
 from ..litellm_core_utils.core_helpers import map_finish_reason
 from .guardrails import GuardrailEventHooks
 from .llms.openai import (

@@ -2016,5 +2018,5 @@ class RawRequestTypedDict(TypedDict, total=False):
 
 class CredentialItem(BaseModel):
     credential_name: str
-    credential_values: dict
+    credential_values: SecretDict
     credential_info: dict
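With credential_values typed as Secret[dict], the raw values are masked in reprs and serialized output and are only exposed through get_secret_value(). A minimal sketch of that behavior, not part of the diff; it assumes a pydantic version that ships the generic Secret class (v2.7+), and the values are placeholders:

# Minimal sketch, not part of the diff. Requires pydantic >= 2.7 for the
# generic Secret class; the credential values are placeholders.
from pydantic import BaseModel, Secret

SecretDict = Secret[dict]


class CredentialItem(BaseModel):
    credential_name: str
    credential_values: SecretDict
    credential_info: dict


item = CredentialItem(
    credential_name="shared-azure-credential",
    credential_values={"api_key": "placeholder"},
    credential_info={"description": "example"},
)

print(item)                                       # credential_values is masked in the repr
print(item.credential_values.get_secret_value())  # {'api_key': 'placeholder'}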