Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-24 10:14:26 +00:00)
Add OpenAI o3 & o4-mini (#10065)
Reference:
- https://platform.openai.com/docs/models/o3
- https://platform.openai.com/docs/models/o4-mini
parent d8a1071bc4
commit 5c078af738
2 changed files with 136 additions and 0 deletions
@@ -600,6 +600,40 @@
         "supports_vision": true,
         "supports_prompt_caching": true
     },
+    "o3": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 1e-5,
+        "output_cost_per_token": 4e-5,
+        "cache_read_input_token_cost": 2.5e-6,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": false,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_response_schema": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
+    "o3-2025-04-16": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 1e-5,
+        "output_cost_per_token": 4e-5,
+        "cache_read_input_token_cost": 2.5e-6,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": false,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_response_schema": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
     "o3-mini": {
         "max_tokens": 100000,
         "max_input_tokens": 200000,
@@ -634,6 +668,40 @@
         "supports_reasoning": true,
         "supports_tool_choice": true
     },
+    "o4-mini": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 1.1e-6,
+        "output_cost_per_token": 4.4e-6,
+        "cache_read_input_token_cost": 2.75e-7,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": false,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_response_schema": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
+    "o4-mini-2025-04-16": {
+        "max_tokens": 100000,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "input_cost_per_token": 1.1e-6,
+        "output_cost_per_token": 4.4e-6,
+        "cache_read_input_token_cost": 2.75e-7,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": false,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_response_schema": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
     "o1-mini-2024-09-12": {
         "max_tokens": 65536,
         "max_input_tokens": 128000,
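The *_cost_per_token fields above are USD-per-token rates, i.e. $10 (input), $40 (output), and $2.50 (cached input) per million tokens for o3, and $1.10, $4.40, and $0.275 per million tokens for o4-mini. A minimal sketch of the arithmetic these fields imply (not part of the commit; the request_cost helper and the token counts are illustrative only):

```python
# Per-token USD rates copied from the entries added in this commit.
new_entries = {
    "o3": {
        "input_cost_per_token": 1e-5,
        "output_cost_per_token": 4e-5,
        "cache_read_input_token_cost": 2.5e-6,
    },
    "o4-mini": {
        "input_cost_per_token": 1.1e-6,
        "output_cost_per_token": 4.4e-6,
        "cache_read_input_token_cost": 2.75e-7,
    },
}

def request_cost(model: str, prompt_tokens: int, completion_tokens: int,
                 cached_tokens: int = 0) -> float:
    """Rough USD cost of one call: cached prompt tokens are billed at the
    cache-read rate, the remainder at the normal input rate."""
    rates = new_entries[model]
    uncached = prompt_tokens - cached_tokens
    return (uncached * rates["input_cost_per_token"]
            + cached_tokens * rates["cache_read_input_token_cost"]
            + completion_tokens * rates["output_cost_per_token"])

# 1M prompt tokens + 1M completion tokens:
print(f'{request_cost("o3", 1_000_000, 1_000_000):.2f}')       # 50.00
print(f'{request_cost("o4-mini", 1_000_000, 1_000_000):.2f}')  # 5.50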
The commit's second changed file receives the identical hunks (@@ -600,6 +600,40 @@ and @@ -634,6 +668,40 @@), adding the same o3, o3-2025-04-16, o4-mini, and o4-mini-2025-04-16 entries.
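A hedged sketch of how the new capability flags might be inspected at runtime, assuming these entries end up in litellm.model_cost (the dict litellm builds from this pricing JSON); the field names come from the diff, but treat the exact accessor as an assumption rather than a guaranteed API:

```python
# Sketch only: assumes litellm exposes the pricing JSON as litellm.model_cost
# and that the new "o3" / "o4-mini" keys are present after this commit.
import litellm

for model in ("o3", "o4-mini"):
    info = litellm.model_cost.get(model, {})
    print(
        model,
        "reasoning:", info.get("supports_reasoning"),
        "parallel tool calls:", info.get("supports_parallel_function_calling"),
        "cached input $/token:", info.get("cache_read_input_token_cost"),
    )
```

Note that supports_parallel_function_calling is set to false for all four new entries, so callers relying on parallel tool calls would need to issue tool calls sequentially for these models.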