Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 19:24:27 +00:00
Litellm dev 01 06 2025 p3 (#7596)
* build(model_prices_and_context_window.json): add gemini-1.5-pro 'supports_vision' = true

  Fixes https://github.com/BerriAI/litellm/issues/7592

* build(model_prices_and_context_window.json): add new mistral models pricing + model info
This commit is contained in:
parent c22d9208da
commit b397dc1497

3 changed files with 90 additions and 4 deletions
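The fields touched by this commit live in LiteLLM's model cost map and are what runtime pricing and capability lookups read. Below is a minimal sketch of how the entries in the diff could be inspected, assuming litellm.get_model_info() is available and returns the same keys that appear in model_prices_and_context_window.json (the exact return shape is an assumption here):

# Sketch: read the pricing and capability flags this commit adds.
# Assumes litellm.get_model_info() returns the model-map fields as a dict;
# key names are taken from the diff below.
import litellm

info = litellm.get_model_info("mistral/mistral-large-2411")
print(info["input_cost_per_token"])              # 2e-06 with the new pricing
print(info.get("supports_function_calling"))     # True per the new entry

pixtral = litellm.get_model_info("mistral/pixtral-large-latest")
print(pixtral.get("supports_vision"))            # True per the new entry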
model_prices_and_context_window.json

@@ -1810,8 +1810,19 @@
         "max_tokens": 128000,
         "max_input_tokens": 128000,
         "max_output_tokens": 128000,
-        "input_cost_per_token": 0.000003,
-        "output_cost_per_token": 0.000009,
+        "input_cost_per_token": 0.000002,
+        "output_cost_per_token": 0.000006,
+        "litellm_provider": "mistral",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_assistant_prefill": true
+    },
+    "mistral/mistral-large-2411": {
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.000002,
+        "output_cost_per_token": 0.000006,
         "litellm_provider": "mistral",
         "mode": "chat",
         "supports_function_calling": true,

@@ -1839,6 +1850,30 @@
         "supports_function_calling": true,
         "supports_assistant_prefill": true
     },
+    "mistral/pixtral-large-latest": {
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.000002,
+        "output_cost_per_token": 0.000006,
+        "litellm_provider": "mistral",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_assistant_prefill": true,
+        "supports_vision": true
+    },
+    "mistral/pixtral-large-2411": {
+        "max_tokens": 128000,
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "input_cost_per_token": 0.000002,
+        "output_cost_per_token": 0.000006,
+        "litellm_provider": "mistral",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_assistant_prefill": true,
+        "supports_vision": true
+    },
     "mistral/pixtral-12b-2409": {
         "max_tokens": 128000,
         "max_input_tokens": 128000,
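The hunks above drop the mistral-large rates from 0.000003/0.000009 to 0.000002/0.000006 USD per input/output token and register the 2411 and Pixtral Large entries. A small sketch of what that works out to per request, assuming litellm.cost_per_token() returns a (prompt_cost, completion_cost) tuple as in LiteLLM's cost-tracking helpers; the token counts are made up:

# Sketch: per-request cost under the updated mistral-large pricing.
# Assumes litellm.cost_per_token(model=..., prompt_tokens=..., completion_tokens=...)
# returns (prompt_cost_usd, completion_cost_usd).
import litellm

prompt_cost, completion_cost = litellm.cost_per_token(
    model="mistral/mistral-large-2411",
    prompt_tokens=1000,
    completion_tokens=500,
)
# Expected with the new rates: 1000 * 0.000002 = 0.002 and 500 * 0.000006 = 0.003
print(prompt_cost, completion_cost)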
@@ -2833,6 +2868,8 @@
         "output_cost_per_character_above_128k_tokens": 0.0000025,
         "litellm_provider": "vertex_ai-language-models",
         "mode": "chat",
+        "supports_vision": true,
+        "supports_pdf_input": true,
         "supports_system_messages": true,
         "supports_function_calling": true,
         "supports_tool_choice": true,

@@ -2859,6 +2896,7 @@
         "output_cost_per_character_above_128k_tokens": 0.0000025,
         "litellm_provider": "vertex_ai-language-models",
         "mode": "chat",
+        "supports_vision": true,
         "supports_system_messages": true,
         "supports_function_calling": true,
         "supports_tool_choice": true,

@@ -2885,6 +2923,7 @@
         "output_cost_per_character_above_128k_tokens": 0.0000025,
         "litellm_provider": "vertex_ai-language-models",
         "mode": "chat",
+        "supports_vision": true,
         "supports_system_messages": true,
         "supports_function_calling": true,
         "supports_tool_choice": true,
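The Vertex AI Gemini hunks above only add capability flags (supports_vision on each entry, plus supports_pdf_input in the first block), which is what issue #7592 reported missing for gemini-1.5-pro. A hedged sketch of gating image input on that flag, reusing the supports_vision helper the new test below imports; the litellm.completion() call shape and image URL are illustrative placeholders:

# Sketch: only attach image content when the model map reports vision support.
# supports_vision() is imported the same way the new test does; the completion
# call and example URL are placeholders, not part of this commit.
import litellm
from litellm.utils import supports_vision

model = "gemini-1.5-pro"
if supports_vision(model):
    response = litellm.completion(
        model=model,
        messages=[
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": "Describe this image."},
                    {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
                ],
            }
        ],
    )
    print(response.choices[0].message.content)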
@@ -1418,3 +1418,11 @@ def test_get_valid_models_default(monkeypatch):
     monkeypatch.setenv("FIREWORKS_API_KEY", "sk-1234")
     valid_models = get_valid_models()
     assert len(valid_models) > 0
+
+
+def test_supports_vision_gemini():
+    os.environ["LITELLM_LOCAL_MODEL_COST_MAP"] = "True"
+    litellm.model_cost = litellm.get_model_cost_map(url="")
+    from litellm.utils import supports_vision
+
+    assert supports_vision("gemini-1.5-pro") is True
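The same local-cost-map setup the test uses (setting LITELLM_LOCAL_MODEL_COST_MAP and reloading via get_model_cost_map(url="")) can also be reused to spot-check the new Mistral entries without a network fetch; a small sketch, assuming litellm.model_cost is a dict keyed by model name with the fields shown in the diff:

# Sketch: load the bundled cost map (no network fetch) and inspect the entries
# this commit adds; mirrors the setup in test_supports_vision_gemini().
import os
import litellm

os.environ["LITELLM_LOCAL_MODEL_COST_MAP"] = "True"
litellm.model_cost = litellm.get_model_cost_map(url="")

for name in ("mistral/mistral-large-2411", "mistral/pixtral-large-2411"):
    entry = litellm.model_cost[name]
    print(name, entry["input_cost_per_token"], entry.get("supports_vision", False))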