forked from phoenix/litellm-mirror
build(model_prices_and_context_window.json): add databricks models to model cost map
This commit is contained in:
parent e6b9945ad9
commit 3ec4d1b1fc

2 changed files with 168 additions and 28 deletions
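The diff below adds Databricks-hosted models to the cost map: per-token USD prices plus context-window limits, keyed as "databricks/<model>". As a rough sketch of how a consumer of this JSON can turn an entry into a request-cost estimate, the snippet below loads the map and multiplies token counts by the "input_cost_per_token" / "output_cost_per_token" fields shown in the diff; the file path and token counts are illustrative, not taken from this commit.

import json

# Illustrative path: the cost map shipped with the repo.
with open("model_prices_and_context_window.json") as f:
    model_cost = json.load(f)

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    """Estimate the USD cost of one request from the cost-map entry."""
    entry = model_cost[model]
    return (prompt_tokens * entry["input_cost_per_token"]
            + completion_tokens * entry["output_cost_per_token"])

# databricks-dbrx-instruct is priced at $0.75 per 1M input tokens and
# $2.25 per 1M output tokens, so 10,000 prompt tokens + 1,000 completion
# tokens come to roughly $0.0075 + $0.00225 = $0.00975.
print(estimate_cost("databricks/databricks-dbrx-instruct", 10_000, 1_000))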
@@ -1605,36 +1605,36 @@
         "mode": "chat"
     },
     "replicate/meta/llama-3-70b": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
         "input_cost_per_token": 0.00000065,
         "output_cost_per_token": 0.00000275,
         "litellm_provider": "replicate",
         "mode": "chat"
     },
     "replicate/meta/llama-3-70b-instruct": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
         "input_cost_per_token": 0.00000065,
         "output_cost_per_token": 0.00000275,
         "litellm_provider": "replicate",
         "mode": "chat"
     },
     "replicate/meta/llama-3-8b": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8086,
+        "max_input_tokens": 8086,
+        "max_output_tokens": 8086,
         "input_cost_per_token": 0.00000005,
         "output_cost_per_token": 0.00000025,
         "litellm_provider": "replicate",
         "mode": "chat"
     },
     "replicate/meta/llama-3-8b-instruct": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8086,
+        "max_input_tokens": 8086,
+        "max_output_tokens": 8086,
         "input_cost_per_token": 0.00000005,
         "output_cost_per_token": 0.00000025,
         "litellm_provider": "replicate",
@@ -1898,7 +1898,7 @@
         "mode": "chat"
     },
     "openrouter/meta-llama/codellama-34b-instruct": {
-        "max_tokens": 8096,
+        "max_tokens": 8192,
         "input_cost_per_token": 0.0000005,
         "output_cost_per_token": 0.0000005,
         "litellm_provider": "openrouter",
@@ -3538,6 +3538,76 @@
         "output_cost_per_token": 0.000000,
         "litellm_provider": "voyage",
         "mode": "embedding"
-    }
+    },
+    "databricks/databricks-dbrx-instruct": {
+        "max_tokens": 32768,
+        "max_input_tokens": 32768,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 0.00000075,
+        "output_cost_per_token": 0.00000225,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-meta-llama-3-70b-instruct": {
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.000001,
+        "output_cost_per_token": 0.000003,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-llama-2-70b-chat": {
+        "max_tokens": 4096,
+        "max_input_tokens": 4096,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 0.0000005,
+        "output_cost_per_token": 0.0000015,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-mixtral-8x7b-instruct": {
+        "max_tokens": 4096,
+        "max_input_tokens": 4096,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 0.0000005,
+        "output_cost_per_token": 0.000001,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-mpt-30b-instruct": {
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.000001,
+        "output_cost_per_token": 0.000001,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-mpt-7b-instruct": {
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.0000005,
+        "output_cost_per_token": 0.0000005,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-bge-large-en": {
+        "max_tokens": 512,
+        "max_input_tokens": 512,
+        "output_vector_size": 1024,
+        "input_cost_per_token": 0.0000001,
+        "output_cost_per_token": 0.0,
+        "litellm_provider": "databricks",
+        "mode": "embedding",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    }
 }
The second changed file receives the identical hunks:

@@ -1605,36 +1605,36 @@
         "mode": "chat"
     },
     "replicate/meta/llama-3-70b": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
         "input_cost_per_token": 0.00000065,
         "output_cost_per_token": 0.00000275,
         "litellm_provider": "replicate",
         "mode": "chat"
     },
     "replicate/meta/llama-3-70b-instruct": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
         "input_cost_per_token": 0.00000065,
         "output_cost_per_token": 0.00000275,
         "litellm_provider": "replicate",
         "mode": "chat"
     },
     "replicate/meta/llama-3-8b": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8086,
+        "max_input_tokens": 8086,
+        "max_output_tokens": 8086,
         "input_cost_per_token": 0.00000005,
         "output_cost_per_token": 0.00000025,
         "litellm_provider": "replicate",
         "mode": "chat"
     },
     "replicate/meta/llama-3-8b-instruct": {
-        "max_tokens": 4096,
-        "max_input_tokens": 4096,
-        "max_output_tokens": 4096,
+        "max_tokens": 8086,
+        "max_input_tokens": 8086,
+        "max_output_tokens": 8086,
         "input_cost_per_token": 0.00000005,
         "output_cost_per_token": 0.00000025,
         "litellm_provider": "replicate",
@@ -1898,7 +1898,7 @@
         "mode": "chat"
     },
     "openrouter/meta-llama/codellama-34b-instruct": {
-        "max_tokens": 8096,
+        "max_tokens": 8192,
         "input_cost_per_token": 0.0000005,
         "output_cost_per_token": 0.0000005,
         "litellm_provider": "openrouter",
@@ -3538,6 +3538,76 @@
         "output_cost_per_token": 0.000000,
         "litellm_provider": "voyage",
         "mode": "embedding"
-    }
+    },
+    "databricks/databricks-dbrx-instruct": {
+        "max_tokens": 32768,
+        "max_input_tokens": 32768,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 0.00000075,
+        "output_cost_per_token": 0.00000225,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-meta-llama-3-70b-instruct": {
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.000001,
+        "output_cost_per_token": 0.000003,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-llama-2-70b-chat": {
+        "max_tokens": 4096,
+        "max_input_tokens": 4096,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 0.0000005,
+        "output_cost_per_token": 0.0000015,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-mixtral-8x7b-instruct": {
+        "max_tokens": 4096,
+        "max_input_tokens": 4096,
+        "max_output_tokens": 4096,
+        "input_cost_per_token": 0.0000005,
+        "output_cost_per_token": 0.000001,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-mpt-30b-instruct": {
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.000001,
+        "output_cost_per_token": 0.000001,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-mpt-7b-instruct": {
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.0000005,
+        "output_cost_per_token": 0.0000005,
+        "litellm_provider": "databricks",
+        "mode": "chat",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    },
+    "databricks/databricks-bge-large-en": {
+        "max_tokens": 512,
+        "max_input_tokens": 512,
+        "output_vector_size": 1024,
+        "input_cost_per_token": 0.0000001,
+        "output_cost_per_token": 0.0,
+        "litellm_provider": "databricks",
+        "mode": "embedding",
+        "source": "https://www.databricks.com/product/pricing/foundation-model-serving"
+    }
 }
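With these entries in the map, litellm's cost helpers can resolve the Databricks models by name. The snippet below is a sketch, not part of this commit: it assumes litellm exposes a cost_per_token helper that accepts model, prompt_tokens, and completion_tokens keyword arguments and returns separate prompt and completion costs in USD; the expected values are derived from the databricks-meta-llama-3-70b-instruct entry above.

# Sketch only: assumes litellm's cost_per_token helper has this signature and
# return shape; the prices come from the cost-map entry added in this commit.
import litellm

prompt_cost, completion_cost = litellm.cost_per_token(
    model="databricks/databricks-meta-llama-3-70b-instruct",
    prompt_tokens=2_000,    # 2,000 * $0.000001 = $0.002
    completion_tokens=500,  # 500 * $0.000003   = $0.0015
)
print(prompt_cost + completion_cost)  # expected to be about $0.0035 given the entry above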