Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 03:34:10 +00:00
build(model_prices_and_context_window.json): fixes https://github.com/BerriAI/litellm/issues/4305
parent 473509aed3
commit db2cf5b5a5

2 changed files with 6 additions and 6 deletions
Changed file 1 of 2:

@@ -886,7 +886,7 @@
     "max_tokens": 8192,
     "max_input_tokens": 8192,
     "max_output_tokens": 8192,
-    "input_cost_per_token": 0.00000010,
+    "input_cost_per_token": 0.00000005,
     "output_cost_per_token": 0.00000010,
     "litellm_provider": "groq",
     "mode": "chat",
@@ -896,8 +896,8 @@
     "max_tokens": 8192,
     "max_input_tokens": 8192,
     "max_output_tokens": 8192,
-    "input_cost_per_token": 0.00000064,
-    "output_cost_per_token": 0.00000080,
+    "input_cost_per_token": 0.00000059,
+    "output_cost_per_token": 0.00000079,
     "litellm_provider": "groq",
     "mode": "chat",
     "supports_function_calling": true
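As a quick sanity check on the first hunk, the per-token figures can be converted back to the $/1M-token prices that providers usually publish. This is only a sketch: the model key for this entry is not visible in the excerpt, so the assumption that it is a Groq Llama 3 8B entry is mine, not the commit's.

# Minimal sketch, assuming this entry is a Groq Llama 3 8B model
# (the JSON key is not shown in this diff excerpt).
input_cost_per_token = 0.00000005   # new value from this commit
output_cost_per_token = 0.00000010  # unchanged

print(f"input:  ${input_cost_per_token * 1_000_000:.2f} per 1M tokens")   # $0.05
print(f"output: ${output_cost_per_token * 1_000_000:.2f} per 1M tokens")  # $0.10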
Changed file 2 of 2 (the same hunks):

@@ -886,7 +886,7 @@
     "max_tokens": 8192,
     "max_input_tokens": 8192,
     "max_output_tokens": 8192,
-    "input_cost_per_token": 0.00000010,
+    "input_cost_per_token": 0.00000005,
     "output_cost_per_token": 0.00000010,
     "litellm_provider": "groq",
     "mode": "chat",
@@ -896,8 +896,8 @@
     "max_tokens": 8192,
     "max_input_tokens": 8192,
     "max_output_tokens": 8192,
-    "input_cost_per_token": 0.00000064,
-    "output_cost_per_token": 0.00000080,
+    "input_cost_per_token": 0.00000059,
+    "output_cost_per_token": 0.00000079,
     "litellm_provider": "groq",
     "mode": "chat",
     "supports_function_calling": true
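To see what the corrected prices in the second hunk mean for a single request, the cost is simply tokens multiplied by the per-token rates from this JSON. The sketch below uses a hypothetical request size and assumes (not stated in the diff) that this entry is a larger Groq model such as a Llama 3 70B variant.

# Rough cost estimate for a hypothetical request of 1,200 prompt tokens
# and 300 completion tokens, using the corrected per-token prices.
prompt_tokens, completion_tokens = 1_200, 300
input_cost_per_token = 0.00000059   # new value from this commit
output_cost_per_token = 0.00000079  # new value from this commit

cost = prompt_tokens * input_cost_per_token + completion_tokens * output_cost_per_token
print(f"estimated request cost: ${cost:.6f}")  # $0.000945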