feat(proxy_server.py): new 'supported_openai_params' endpoint
get supported openai params for a given model
This commit is contained in:
parent bb85a5e6b2
commit 25df95ab10
2 changed files with 34 additions and 1 deletion
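The commit message describes the new endpoint, but this view only shows the OpenAIConfig hunk, not the proxy_server.py changes. Below is a minimal sketch of what such an endpoint could look like, assuming a FastAPI router (as proxy_server.py uses) and litellm's public get_supported_openai_params() helper; the route path, query-parameter name, and response shape here are assumptions, not taken from the commit.

```python
# Hypothetical sketch of the endpoint, NOT the commit's actual code:
# the proxy_server.py hunk is not shown in this view. The route path,
# parameter name, and response shape are assumptions.
from fastapi import APIRouter, HTTPException

import litellm

router = APIRouter()


@router.get("/supported_openai_params")
async def supported_openai_params(model: str):
    """Return the OpenAI params litellm supports for the given model."""
    try:
        return {
            "supported_openai_params": litellm.get_supported_openai_params(model=model)
        }
    except Exception:
        raise HTTPException(
            status_code=400,
            detail={"error": f"could not map openai params for model={model}"},
        )
```

A call might then look like `curl 'http://0.0.0.0:4000/supported_openai_params?model=gpt-4'` (port and path assumed).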
```diff
@@ -260,7 +260,7 @@ class OpenAIConfig:
         model_specific_params = []
         if (
-            model != "gpt-3.5-turbo-16k" and model != "gpt-4"
+            "gpt-3.5-turbo" in model or "gpt-4-turbo" in model or "gpt-4o" in model
         ):  # gpt-4 does not support 'response_format'
             model_specific_params.append("response_format")
```
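For context, here is a standalone sketch (not part of the commit) contrasting the two conditions: the old check was a deny-list of two exact model names, while the new check is an allow-list on substring matches.

```python
# Illustration of the behavioral change in the hunk above.
# Model names are examples; the helper names are hypothetical.

MODELS = ["gpt-3.5-turbo-16k", "gpt-4", "gpt-4-0613", "gpt-4-turbo", "gpt-4o"]


def old_check(model: str) -> bool:
    # Before this commit: everything except two exact names passed.
    return model != "gpt-3.5-turbo-16k" and model != "gpt-4"


def new_check(model: str) -> bool:
    # After this commit: only model families matched by substring pass.
    return "gpt-3.5-turbo" in model or "gpt-4-turbo" in model or "gpt-4o" in model


for m in MODELS:
    print(f"{m:>20}  old={old_check(m)!s:5}  new={new_check(m)!s}")
```

Note that the substring match flips some results, e.g. "gpt-4-0613" no longer passes, while "gpt-3.5-turbo-16k" now does.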