forked from phoenix/litellm-mirror
Add o1 models on OpenRouter. (#5676)
parent 00047de1c6
commit 795047c37f
1 changed file with 48 additions and 0 deletions
@@ -3137,6 +3137,54 @@
         "litellm_provider": "openrouter",
         "mode": "chat"
     },
+    "openrouter/openai/o1-mini": {
+        "max_tokens": 65536,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 65536,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000012,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true
+    },
+    "openrouter/openai/o1-mini-2024-09-12": {
+        "max_tokens": 65536,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 65536,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000012,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true
+    },
+    "openrouter/openai/o1-preview": {
+        "max_tokens": 32768,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 0.000015,
+        "output_cost_per_token": 0.000060,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true
+    },
+    "openrouter/openai/o1-preview-2024-09-12": {
+        "max_tokens": 32768,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 0.000015,
+        "output_cost_per_token": 0.000060,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true
+    },
     "openrouter/openai/gpt-4o": {
         "max_tokens": 4096,
         "max_input_tokens": 128000,
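The new entries follow the same schema as the surrounding entries in litellm's model cost map; the per-token prices work out to roughly $3/M input and $12/M output for o1-mini, and $15/M input and $60/M output for o1-preview. Below is a minimal sketch of how a caller might exercise one of the new model keys, assuming litellm is installed, an OPENROUTER_API_KEY is exported, and that this cost map is the one litellm loads by default; the prompt text and cost arithmetic are illustrative only.

    # Sketch: call one of the newly added OpenRouter o1 entries via litellm.
    # Assumes OPENROUTER_API_KEY is set in the environment; the model string
    # mirrors the key added in this commit.
    import litellm

    response = litellm.completion(
        model="openrouter/openai/o1-mini",
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
    )
    print(response.choices[0].message.content)

    # Rough cost estimate using the per-token prices from this diff
    # (0.000003 USD per input token, 0.000012 USD per output token for o1-mini).
    usage = response.usage
    estimated_cost = (
        usage.prompt_tokens * 0.000003 + usage.completion_tokens * 0.000012
    )
    print(f"approx. ${estimated_cost:.6f} for this call")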