[Feat] Add GET, DELETE Responses endpoints on LiteLLM Proxy (#10297)
* add GET responses endpoints on router
* add GET responses endpoints on proxy
* add DELETE responses endpoints on proxy
* fixes for testing GET, DELETE endpoints
* test_basic_responses api e2e
parent 0a2c964db7
commit 5de101ab7b
8 changed files with 182 additions and 20 deletions
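For context, a minimal sketch of exercising the new routes through a running proxy. The base URL, virtual key, and model alias below are placeholders, and the calls assume the OpenAI Python SDK's Responses client, whose retrieve/delete methods map onto the GET and DELETE /v1/responses/{response_id} routes added in this PR.

from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:4000",  # LiteLLM Proxy address (placeholder)
    api_key="sk-1234",                 # proxy virtual key (placeholder)
)

# Create a response so there is something to fetch and delete.
created = client.responses.create(
    model="gpt-4o",  # any model alias configured on the proxy (placeholder)
    input="Say hello in one sentence.",
)

# GET /v1/responses/{response_id} -> retrieve the stored response.
fetched = client.responses.retrieve(created.id)
print(fetched.status)

# DELETE /v1/responses/{response_id} -> delete it.
client.responses.delete(created.id)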
@@ -176,6 +176,16 @@ class ResponsesAPIRequestUtils:
             response_id=response_id,
         )

+    @staticmethod
+    def get_model_id_from_response_id(response_id: Optional[str]) -> Optional[str]:
+        """Get the model_id from the response_id"""
+        if response_id is None:
+            return None
+        decoded_response_id = (
+            ResponsesAPIRequestUtils._decode_responses_api_response_id(response_id)
+        )
+        return decoded_response_id.get("model_id") or None
+

 class ResponseAPILoggingUtils:
     @staticmethod
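The helper above is presumably what lets the router recover which deployment produced a response, so that GET and DELETE calls can be sent back to the same place. A hypothetical sketch of the decode step, assuming the enriched response_id is a base64 string wrapping "key:value" pairs (the real logic lives in _decode_responses_api_response_id, which is not part of this hunk):

import base64
from typing import Dict, Optional


def decode_response_id(response_id: str) -> Dict[str, Optional[str]]:
    """Best-effort decode of an enriched response_id (hypothetical encoding)."""
    try:
        decoded = base64.b64decode(response_id.encode()).decode()
    except Exception:
        # Not an enriched id; treat it as the raw provider response id.
        return {"model_id": None, "response_id": response_id}
    fields = dict(part.split(":", 1) for part in decoded.split(";") if ":" in part)
    return {
        "model_id": fields.get("model_id"),
        "response_id": fields.get("response_id", response_id),
    }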