Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 18:24:20 +00:00

Commit abc204422d: Merge 9a209078e2 into b82af5b826
1 changed file with 149 additions and 81 deletions
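Most of the changes below rewrite per-token prices from long decimal literals into scientific notation. As a quick, illustrative check (plain Python, not part of the commit), both spellings decode to the same float, so the recorded prices are unchanged:

```python
import json

# Illustrative check: the decimal and scientific-notation spellings seen in
# this diff parse to the same IEEE-754 double, so the prices do not change.
old = json.loads('{"input_cost_per_token": 0.00000075}')
new = json.loads('{"input_cost_per_token": 7.5e-07}')
assert old["input_cost_per_token"] == new["input_cost_per_token"]
print(new["input_cost_per_token"])  # 7.5e-07
```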
@@ -3867,25 +3867,24 @@
"supports_prompt_caching": true
},
"groq/deepseek-r1-distill-llama-70b": {
"max_tokens": 131072,
"max_input_tokens": 131072,
"max_output_tokens": 131072,
"input_cost_per_token": 0.00000075,
"output_cost_per_token": 0.00000099,
"max_tokens": 128000,
"max_input_tokens": 128000,
"max_output_tokens": 128000,
"input_cost_per_token": 7.5e-07,
"output_cost_per_token": 9.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_system_messages": false,
"supports_function_calling": false,
"supports_function_calling": true,
"supports_response_schema": true,
"supports_reasoning": true,
"supports_response_schema": false,
"supports_tool_choice": true
},
"groq/llama-3.3-70b-versatile": {
"max_tokens": 8192,
"max_tokens": 32768,
"max_input_tokens": 128000,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000059,
"output_cost_per_token": 0.00000079,
"max_output_tokens": 32768,
"input_cost_per_token": 5.9e-07,
"output_cost_per_token": 7.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
@@ -3896,11 +3895,21 @@
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000059,
"output_cost_per_token": 0.00000099,
"input_cost_per_token": 5.9e-07,
"output_cost_per_token": 9.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2025-04-14"
},
"groq/llama-guard-3-8b": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 2e-07,
"output_cost_per_token": 2e-07,
"litellm_provider": "groq",
"mode": "chat"
},
"groq/llama2-70b-4096": {
"max_tokens": 4096,
@@ -3918,106 +3927,106 @@
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000005,
"output_cost_per_token": 0.00000008,
"input_cost_per_token": 5e-08,
"output_cost_per_token": 8e-08,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"mode": "chat"
},
"groq/llama-3.2-1b-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000004,
"output_cost_per_token": 0.00000004,
"input_cost_per_token": 4e-08,
"output_cost_per_token": 4e-08,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2025-04-14"
},
"groq/llama-3.2-3b-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000006,
"output_cost_per_token": 0.00000006,
"input_cost_per_token": 6e-08,
"output_cost_per_token": 6e-08,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2025-04-14"
},
"groq/llama-3.2-11b-text-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000018,
"output_cost_per_token": 0.00000018,
"input_cost_per_token": 1.8e-07,
"output_cost_per_token": 1.8e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2024-10-28"
},
"groq/llama-3.2-11b-vision-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000018,
"output_cost_per_token": 0.00000018,
"input_cost_per_token": 1.8e-07,
"output_cost_per_token": 1.8e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_vision": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2025-04-14"
},
"groq/llama-3.2-90b-text-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.0000009,
"output_cost_per_token": 0.0000009,
"input_cost_per_token": 9e-07,
"output_cost_per_token": 9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2024-11-25"
},
"groq/llama-3.2-90b-vision-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.0000009,
"output_cost_per_token": 0.0000009,
"input_cost_per_token": 9e-07,
"output_cost_per_token": 9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_vision": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2025-04-14"
},
"groq/llama3-70b-8192": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000059,
"output_cost_per_token": 0.00000079,
"input_cost_per_token": 5.9e-07,
"output_cost_per_token": 7.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"mode": "chat"
},
"groq/llama-3.1-8b-instant": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_input_tokens": 128000,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000005,
"output_cost_per_token": 0.00000008,
"input_cost_per_token": 5e-08,
"output_cost_per_token": 8e-08,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
@@ -4028,13 +4037,14 @@
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000059,
"output_cost_per_token": 0.00000079,
"input_cost_per_token": 5.9e-07,
"output_cost_per_token": 7.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2025-01-24"
},
"groq/llama-3.1-405b-reasoning": {
"max_tokens": 8192,
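Several entries in this diff gain a deprecation_date field (for example "2025-01-24" just above). A minimal sketch of how a caller might use that field to flag retired models; the is_deprecated helper below is hypothetical and not part of litellm:

```python
from datetime import date
from typing import Optional

def is_deprecated(entry: dict, today: Optional[date] = None) -> bool:
    """Hypothetical helper: True if the entry's deprecation_date has passed."""
    raw = entry.get("deprecation_date")
    if raw is None:
        return False
    return date.fromisoformat(raw) <= (today or date.today())

entry = {"litellm_provider": "groq", "deprecation_date": "2025-01-24"}
print(is_deprecated(entry, today=date(2025, 4, 24)))  # True
```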
@@ -4048,83 +4058,141 @@
"supports_response_schema": true,
"supports_tool_choice": true
},
"groq/mixtral-8x7b-32768": {
"max_tokens": 32768,
"max_input_tokens": 32768,
"max_output_tokens": 32768,
"input_cost_per_token": 0.00000024,
"output_cost_per_token": 0.00000024,
"groq/meta-llama/llama-4-scout-17b-16e-instruct": {
"max_tokens": 8192,
"max_input_tokens": 131072,
"max_output_tokens": 8192,
"input_cost_per_token": 1.1e-07,
"output_cost_per_token": 3.4e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
},
"groq/meta-llama/llama-4-maverick-17b-128e-instruct": {
"max_tokens": 8192,
"max_input_tokens": 131072,
"max_output_tokens": 8192,
"input_cost_per_token": 2e-07,
"output_cost_per_token": 6e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
},
"groq/mistral-saba-24b": {
"max_tokens": 32000,
"max_input_tokens": 32000,
"max_output_tokens": 32000,
"input_cost_per_token": 7.9e-07,
"output_cost_per_token": 7.9e-07,
"litellm_provider": "groq",
"mode": "chat"
},
"groq/mixtral-8x7b-32768": {
"max_tokens": 32768,
"max_input_tokens": 32768,
"max_output_tokens": 32768,
"input_cost_per_token": 2.4e-07,
"output_cost_per_token": 2.4e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true,
"deprecation_date": "2025-03-20"
},
"groq/gemma-7b-it": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000007,
"output_cost_per_token": 0.00000007,
"input_cost_per_token": 7e-08,
"output_cost_per_token": 7e-08,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2024-12-18"
},
"groq/gemma2-9b-it": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000020,
"output_cost_per_token": 0.00000020,
"input_cost_per_token": 2e-07,
"output_cost_per_token": 2e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_function_calling": false,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": false
},
"groq/llama3-groq-70b-8192-tool-use-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000089,
"output_cost_per_token": 0.00000089,
"input_cost_per_token": 8.9e-07,
"output_cost_per_token": 8.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true
"supports_tool_choice": true,
"deprecation_date": "2025-1-6"
},
"groq/llama3-groq-8b-8192-tool-use-preview": {
"max_tokens": 8192,
"max_input_tokens": 8192,
"max_output_tokens": 8192,
"input_cost_per_token": 0.00000019,
"output_cost_per_token": 0.00000019,
"input_cost_per_token": 1.9e-07,
"output_cost_per_token": 1.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_tool_choice": true,
"deprecation_date": "2025-1-6"
},
"groq/qwen-qwq-32b": {
"max_tokens": 128000,
"max_input_tokens": 128000,
"max_output_tokens": 128000,
"input_cost_per_token": 2.9e-07,
"output_cost_per_token": 3.9e-07,
"litellm_provider": "groq",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"supports_reasoning": true,
"supports_tool_choice": true
},
"groq/playai-tts": {
"max_tokens": 10000,
"max_input_tokens": 10000,
"max_output_tokens": 10000,
"input_cost_per_character": 5e-05,
"litellm_provider": "groq",
"mode": "audio_speech"
},
"groq/whisper-large-v3": {
"mode": "audio_transcription",
"input_cost_per_second": 0.00003083,
"output_cost_per_second": 0,
"litellm_provider": "groq"
"input_cost_per_second": 3.083e-05,
"output_cost_per_second": 0.0,
"litellm_provider": "groq",
"mode": "audio_transcription"
},
"groq/whisper-large-v3-turbo": {
"mode": "audio_transcription",
"input_cost_per_second": 0.00001111,
"output_cost_per_second": 0,
"litellm_provider": "groq"
"input_cost_per_second": 1.111e-05,
"output_cost_per_second": 0.0,
"litellm_provider": "groq",
"mode": "audio_transcription"
},
"groq/distil-whisper-large-v3-en": {
"mode": "audio_transcription",
"input_cost_per_second": 0.00000556,
"output_cost_per_second": 0,
"litellm_provider": "groq"
"input_cost_per_second": 5.56e-06,
"output_cost_per_second": 0.0,
"litellm_provider": "groq",
"mode": "audio_transcription"
},
"cerebras/llama3.1-8b": {
"max_tokens": 128000,
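For context on how numbers like these are typically consumed: a per-request cost is usually the prompt and completion token counts multiplied by the entry's per-token prices. A minimal sketch using values from the diff above; estimate_cost is an illustrative helper, not litellm's actual implementation:

```python
def estimate_cost(entry: dict, prompt_tokens: int, completion_tokens: int) -> float:
    # Illustrative only: multiply token counts by the per-token prices
    # recorded in a cost-map entry like the ones shown above.
    return (prompt_tokens * entry["input_cost_per_token"]
            + completion_tokens * entry["output_cost_per_token"])

entry = {"input_cost_per_token": 5.9e-07, "output_cost_per_token": 7.9e-07}
print(round(estimate_cost(entry, 1000, 200), 6))  # 0.000748
```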