Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
[Docs] Responses API (#10172)
* docs litellm responses api
* doc fix
* docs responses API
* add get_supported_openai_params for LiteLLMCompletionResponsesConfig
* add Supported Responses API Parameters
This commit is contained in:
parent d6b45ba143
commit 1c13fd2add

2 changed files with 468 additions and 49 deletions
@@ -53,6 +53,26 @@ RESPONSES_API_SESSION_HANDLER = SessionHandler()


class LiteLLMCompletionResponsesConfig:
    @staticmethod
    def get_supported_openai_params(model: str) -> list:
        """
        LiteLLM Adapter from OpenAI Responses API to Chat Completion API supports a subset of OpenAI Responses API params
        """
        return [
            "input",
            "model",
            "instructions",
            "max_output_tokens",
            "metadata",
            "parallel_tool_calls",
            "previous_response_id",
            "stream",
            "temperature",
            "tool_choice",
            "tools",
            "top_p",
            "user",
        ]

    @staticmethod
    def transform_responses_api_request_to_chat_completion_request(
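
Below is a minimal usage sketch of the get_supported_openai_params helper added in this hunk. The import path is an assumption (the hunk does not show which module the class lives in), and the request payload is hypothetical, so treat this as illustrative rather than the definitive API.

    # Assumed import path; the diff hunk does not show the module this class lives in.
    from litellm.responses.litellm_completion_transformation.transformation import (
        LiteLLMCompletionResponsesConfig,
    )

    # Hypothetical Responses API request payload, used only for illustration.
    request = {
        "model": "gpt-4o",
        "input": "Tell me a joke",
        "temperature": 0.2,
        "reasoning": {"effort": "low"},  # not in the supported-params list above
    }

    # Keep only the params the Responses API -> Chat Completion adapter supports.
    supported = set(
        LiteLLMCompletionResponsesConfig.get_supported_openai_params(request["model"])
    )
    filtered = {k: v for k, v in request.items() if k in supported}
    print(filtered)  # "reasoning" is dropped; model, input, temperature pass through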