Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-24 18:24:20 +00:00)
add GET, DELETE responses API endpoints

commit f106c1d14a (parent 0a3c2b7229)
4 changed files with 114 additions and 6 deletions
@@ -108,7 +108,13 @@ class ProxyBaseLLMRequestProcessing:
         user_api_key_dict: UserAPIKeyAuth,
         proxy_logging_obj: ProxyLogging,
         proxy_config: ProxyConfig,
-        route_type: Literal["acompletion", "aresponses", "_arealtime"],
+        route_type: Literal[
+            "acompletion",
+            "aresponses",
+            "_arealtime",
+            "aget_responses",
+            "adelete_responses",
+        ],
         version: Optional[str] = None,
         user_model: Optional[str] = None,
         user_temperature: Optional[float] = None,
@@ -178,7 +184,13 @@ class ProxyBaseLLMRequestProcessing:
         request: Request,
         fastapi_response: Response,
         user_api_key_dict: UserAPIKeyAuth,
-        route_type: Literal["acompletion", "aresponses", "_arealtime"],
+        route_type: Literal[
+            "acompletion",
+            "aresponses",
+            "_arealtime",
+            "aget_responses",
+            "adelete_responses",
+        ],
         proxy_logging_obj: ProxyLogging,
         general_settings: dict,
         proxy_config: ProxyConfig,
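Both signatures in ProxyBaseLLMRequestProcessing now accept the two new route types. As a minimal sketch of what the widened Literal buys (the RouteType alias is illustrative; the diff inlines the Literal in each signature), a static type checker will reject any route string outside the allowed set:

```python
from typing import Literal

# Illustrative alias, not from the diff.
RouteType = Literal[
    "acompletion",
    "aresponses",
    "_arealtime",
    "aget_responses",
    "adelete_responses",
]

def describe(route_type: RouteType) -> str:
    # mypy/pyright narrow route_type to the five allowed strings here.
    return f"routing via {route_type}"

describe("aget_responses")    # OK
# describe("aget_response")   # rejected by the type checker: not in RouteType
```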
@@ -106,8 +106,49 @@ async def get_response(
         -H "Authorization: Bearer sk-1234"
         ```
     """
-    # TODO: Implement response retrieval logic
-    pass
+    from litellm.proxy.proxy_server import (
+        _read_request_body,
+        general_settings,
+        llm_router,
+        proxy_config,
+        proxy_logging_obj,
+        select_data_generator,
+        user_api_base,
+        user_max_tokens,
+        user_model,
+        user_request_timeout,
+        user_temperature,
+        version,
+    )
+
+    data = await _read_request_body(request=request)
+    processor = ProxyBaseLLMRequestProcessing(data=data)
+    try:
+        return await processor.base_process_llm_request(
+            request=request,
+            fastapi_response=fastapi_response,
+            user_api_key_dict=user_api_key_dict,
+            route_type="aget_responses",
+            proxy_logging_obj=proxy_logging_obj,
+            llm_router=llm_router,
+            general_settings=general_settings,
+            proxy_config=proxy_config,
+            select_data_generator=select_data_generator,
+            model=None,
+            user_model=user_model,
+            user_temperature=user_temperature,
+            user_request_timeout=user_request_timeout,
+            user_max_tokens=user_max_tokens,
+            user_api_base=user_api_base,
+            version=version,
+        )
+    except Exception as e:
+        raise await processor._handle_llm_api_exception(
+            e=e,
+            user_api_key_dict=user_api_key_dict,
+            proxy_logging_obj=proxy_logging_obj,
+            version=version,
+        )
+
+
 @router.delete(
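With get_response wired through base_process_llm_request, the endpoint can be exercised like any other proxy route. A hedged usage sketch, assuming the proxy runs at the default http://localhost:4000 and that the retrieve route follows the OpenAI Responses API shape (/responses/{response_id}); the response ID is hypothetical:

```python
import requests  # any HTTP client works; requests is used for brevity

PROXY_BASE = "http://localhost:4000"  # assumption: default litellm proxy address
RESPONSE_ID = "resp_abc123"           # hypothetical ID from a prior POST /responses

resp = requests.get(
    f"{PROXY_BASE}/responses/{RESPONSE_ID}",
    headers={"Authorization": "Bearer sk-1234"},  # key from the docstring example
    timeout=30,
)
resp.raise_for_status()
print(resp.json())  # the stored response, served via route_type="aget_responses"
```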
@@ -136,8 +177,49 @@ async def delete_response(
         -H "Authorization: Bearer sk-1234"
         ```
     """
-    # TODO: Implement response deletion logic
-    pass
+    from litellm.proxy.proxy_server import (
+        _read_request_body,
+        general_settings,
+        llm_router,
+        proxy_config,
+        proxy_logging_obj,
+        select_data_generator,
+        user_api_base,
+        user_max_tokens,
+        user_model,
+        user_request_timeout,
+        user_temperature,
+        version,
+    )
+
+    data = await _read_request_body(request=request)
+    processor = ProxyBaseLLMRequestProcessing(data=data)
+    try:
+        return await processor.base_process_llm_request(
+            request=request,
+            fastapi_response=fastapi_response,
+            user_api_key_dict=user_api_key_dict,
+            route_type="adelete_responses",
+            proxy_logging_obj=proxy_logging_obj,
+            llm_router=llm_router,
+            general_settings=general_settings,
+            proxy_config=proxy_config,
+            select_data_generator=select_data_generator,
+            model=None,
+            user_model=user_model,
+            user_temperature=user_temperature,
+            user_request_timeout=user_request_timeout,
+            user_max_tokens=user_max_tokens,
+            user_api_base=user_api_base,
+            version=version,
+        )
+    except Exception as e:
+        raise await processor._handle_llm_api_exception(
+            e=e,
+            user_api_key_dict=user_api_key_dict,
+            proxy_logging_obj=proxy_logging_obj,
+            version=version,
+        )
+
+
 @router.get(
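delete_response mirrors the GET handler with route_type="adelete_responses". Since the proxy aims to be OpenAI-compatible, the OpenAI SDK pointed at the proxy should also work; a hedged sketch (base URL, key, and response ID are assumptions, and SDK Responses support requires a recent openai release):

```python
from openai import OpenAI  # assumes an openai>=1.x SDK with Responses API support

client = OpenAI(
    base_url="http://localhost:4000",  # assumption: proxy address
    api_key="sk-1234",                 # proxy key from the docstring example
)

# Deletes the stored response through the proxy's new DELETE route.
client.responses.delete("resp_abc123")  # hypothetical response ID
```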
@@ -22,6 +22,8 @@ ROUTE_ENDPOINT_MAPPING = {
     "amoderation": "/moderations",
     "arerank": "/rerank",
     "aresponses": "/responses",
+    "aget_responses": "/responses",
+    "adelete_responses": "/responses",
 }
 
 
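Both new route types map to the same /responses base path as aresponses. A small sketch of the lookup pattern this mapping supports (the endpoint_for helper is illustrative, not from the diff):

```python
ROUTE_ENDPOINT_MAPPING = {
    "aresponses": "/responses",
    "aget_responses": "/responses",
    "adelete_responses": "/responses",
}

def endpoint_for(route_type: str) -> str:
    # Fall back to the raw route type so unmapped routes stay identifiable.
    return ROUTE_ENDPOINT_MAPPING.get(route_type, route_type)

assert endpoint_for("adelete_responses") == "/responses"
```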
@@ -48,6 +50,8 @@ async def route_request(
         "arerank",
         "aresponses",
         "_arealtime",  # private function for realtime API
+        "aget_responses",
+        "adelete_responses",
     ],
 ):
     """
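route_request now accepts the two new literals and can hand them to the router. A hedged sketch of the string-to-coroutine dispatch idea (FakeRouter and the simplified route_request below are illustrative, not litellm's implementation):

```python
import asyncio

class FakeRouter:
    """Stand-in for litellm.Router with just the two new coroutines."""

    async def aget_responses(self, **kwargs):
        return {"handled_by": "aget_responses", **kwargs}

    async def adelete_responses(self, **kwargs):
        return {"handled_by": "adelete_responses", **kwargs}

async def route_request(llm_router, route_type: str, **data):
    # Resolve the coroutine by name, e.g. llm_router.aget_responses(**data).
    handler = getattr(llm_router, route_type)
    return await handler(**data)

print(asyncio.run(route_request(FakeRouter(), "aget_responses", response_id="resp_abc123")))
```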
@@ -3083,6 +3083,12 @@ class Router:
             "responses",
             "afile_delete",
             "afile_content",
+            # responses api GET
+            "get_responses",
+            "aget_responses",
+            # responses api DELETE
+            "delete_responses",
+            "adelete_responses",
         ] = "assistants",
     ):
         """
@@ -3130,6 +3136,10 @@ class Router:
         elif call_type in (
             "anthropic_messages",
             "aresponses",
+            "get_responses",
+            "aget_responses",
+            "delete_responses",
+            "adelete_responses",
         ):
             return await self._ageneric_api_call_with_fallbacks(
                 original_function=original_function,
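All four new call types funnel into _ageneric_api_call_with_fallbacks, the router's generic pass-through wrapper. A simplified sketch of that pattern, under the assumption that it retries the call against fallback deployments on failure (litellm's real method also handles deployment selection, cooldowns, and logging):

```python
from typing import Any, Awaitable, Callable

async def generic_call_with_fallbacks(
    original_function: Callable[..., Awaitable[Any]],
    fallback_functions: list[Callable[..., Awaitable[Any]]],
    **kwargs: Any,
) -> Any:
    # Try the primary deployment first, then each fallback in order,
    # re-raising the original error if every attempt fails.
    try:
        return await original_function(**kwargs)
    except Exception:
        for fallback in fallback_functions:
            try:
                return await fallback(**kwargs)
            except Exception:
                continue
        raise
```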