build(model_prices_and_context_window.json): fix gpt-4o max tokens

Krrish Dholakia 2024-05-13 12:11:15 -07:00
parent 6c2168b6f7
commit 5dc3f157a6
2 changed files with 27 additions and 3 deletions

@@ -9,6 +9,30 @@
         "mode": "chat",
         "supports_function_calling": true
     },
+    "gpt-4o": {
+        "max_tokens": 4096,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 0.000005,
+        "output_cost_per_token": 0.000015,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true
+    },
+    "gpt-4o-2024-05-13": {
+        "max_tokens": 4096,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 0.000005,
+        "output_cost_per_token": 0.000015,
+        "litellm_provider": "openai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true
+    },
     "gpt-4-turbo-preview": {
         "max_tokens": 4096,
         "max_input_tokens": 128000,
@@ -3366,4 +3390,4 @@
         "mode": "embedding"
     }
 }
 }
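
The entries added above carry the per-token prices and context limits that callers read at runtime. As a rough illustration of how such a pricing map can be consumed, here is a minimal sketch. It is not LiteLLM's own API; the filename comes from the commit message, and `estimate_cost` is a hypothetical helper.

```python
import json

# Illustrative only: load the pricing map (filename taken from the commit message)
# and estimate the cost of one call from the per-token prices added above.
with open("model_prices_and_context_window.json") as f:
    prices = json.load(f)

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    """Hypothetical helper: input tokens * input price + output tokens * output price."""
    entry = prices[model]
    return (
        prompt_tokens * entry["input_cost_per_token"]
        + completion_tokens * entry["output_cost_per_token"]
    )

# A gpt-4o call with 1,000 prompt tokens and 500 completion tokens:
# 1000 * 0.000005 + 500 * 0.000015 = $0.0125 under these prices.
print(estimate_cost("gpt-4o", 1000, 500))
```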

@@ -12,7 +12,7 @@
     "gpt-4o": {
         "max_tokens": 4096,
         "max_input_tokens": 128000,
-        "max_output_tokens": 2048,
+        "max_output_tokens": 4096,
         "input_cost_per_token": 0.000005,
         "output_cost_per_token": 0.000015,
         "litellm_provider": "openai",
@@ -22,7 +22,7 @@
         "supports_vision": true
     },
     "gpt-4o-2024-05-13": {
-        "max_tokens": 2048,
+        "max_tokens": 4096,
         "max_input_tokens": 128000,
         "max_output_tokens": 4096,
         "input_cost_per_token": 0.000005,