Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 18:24:20 +00:00
(fix) fix anyscale context window

parent 8534bf96d5
commit c4f9ef86c9

1 changed file with 15 additions and 8 deletions
@@ -761,6 +761,20 @@
         "litellm_provider": "perplexity",
         "mode": "chat"
     },
+    "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.00000015,
+        "output_cost_per_token": 0.00000015,
+        "litellm_provider": "anyscale",
+        "mode": "chat"
+    },
+    "anyscale/Zephyr-7b-beta": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.00000015,
+        "output_cost_per_token": 0.00000015,
+        "litellm_provider": "anyscale",
+        "mode": "chat"
+    },
     "anyscale/meta-llama/Llama-2-7b-chat-hf": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.00000015,
@@ -768,13 +782,6 @@
         "litellm_provider": "anyscale",
         "mode": "chat"
     },
-    "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
-        "max_tokens": 4096,
-        "input_cost_per_token": 0.00000015,
-        "output_cost_per_token": 0.00000015,
-        "litellm_provider": "anyscale",
-        "mode": "chat"
-    },
     "anyscale/meta-llama/Llama-2-13b-chat-hf": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.00000025,
@@ -790,7 +797,7 @@
         "mode": "chat"
     },
     "anyscale/codellama/CodeLlama-34b-Instruct-hf": {
-        "max_tokens": 4096,
+        "max_tokens": 16384,
         "input_cost_per_token": 0.000001,
         "output_cost_per_token": 0.000001,
         "litellm_provider": "anyscale",
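For context, the entries changed here follow litellm's model-map shape: "max_tokens" is the model's context window and the "*_cost_per_token" fields are USD prices per token. Below is a minimal sketch of how entries of this shape can be consumed; MODEL_MAP, fits_context_window, and estimate_cost are illustrative names, not litellm's public API, and the dict simply mirrors two of the corrected anyscale entries.

# Minimal sketch of consuming entries like the ones in this diff.
# MODEL_MAP and the helpers are illustrative, not part of litellm's API.

MODEL_MAP = {
    "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
        "max_tokens": 16384,
        "input_cost_per_token": 0.00000015,
        "output_cost_per_token": 0.00000015,
        "litellm_provider": "anyscale",
        "mode": "chat",
    },
    "anyscale/codellama/CodeLlama-34b-Instruct-hf": {
        "max_tokens": 16384,
        "input_cost_per_token": 0.000001,
        "output_cost_per_token": 0.000001,
        "litellm_provider": "anyscale",
        "mode": "chat",
    },
}

def fits_context_window(model: str, prompt_tokens: int, max_output_tokens: int) -> bool:
    """Check that the prompt plus requested completion stays inside the model's window."""
    window = MODEL_MAP[model]["max_tokens"]
    return prompt_tokens + max_output_tokens <= window

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    """Estimate USD cost from the per-token prices in the map."""
    entry = MODEL_MAP[model]
    return (prompt_tokens * entry["input_cost_per_token"]
            + completion_tokens * entry["output_cost_per_token"])

if __name__ == "__main__":
    model = "anyscale/codellama/CodeLlama-34b-Instruct-hf"
    # True with the corrected 16384-token window; False under the old 4096 value.
    print(fits_context_window(model, prompt_tokens=12000, max_output_tokens=2000))
    print(f"${estimate_cost(model, 1000, 500):.6f}")  # $0.001500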