(docs) add ollama/llama2 llm max_tokens + cost

ishaan-jaff 2023-10-10 08:33:32 -07:00
parent c94ee62bcf
commit a27d9ad6bc


@@ -542,5 +542,33 @@
"together-ai-40.1b-70b": {
"input_cost_per_token": 0.000003,
"output_cost_per_token": 0.000003
},
"ollama/llama2": {
"max_tokens": 4096,
"input_cost_per_token": 0.0,
"output_cost_per_token": 0.0,
"litellm_provider": "ollama",
"mode": "completion"
},
"ollama/llama2:13b": {
"max_tokens": 4096,
"input_cost_per_token": 0.0,
"output_cost_per_token": 0.0,
"litellm_provider": "ollama",
"mode": "completion"
},
"ollama/llama2:70b": {
"max_tokens": 4096,
"input_cost_per_token": 0.0,
"output_cost_per_token": 0.0,
"litellm_provider": "ollama",
"mode": "completion"
},
"ollama/llama2-uncensored": {
"max_tokens": 4096,
"input_cost_per_token": 0.0,
"output_cost_per_token": 0.0,
"litellm_provider": "ollama",
"mode": "completion"
}
}
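
For reference, a minimal sketch (not part of this commit) of how the new entries might be read at runtime. It assumes litellm exposes this JSON map as the plain dict litellm.model_cost, keyed by model name, as in recent releases:

    import litellm

    # Look up the entry added in this commit; the key matches the JSON above.
    entry = litellm.model_cost["ollama/llama2"]

    print(entry["max_tokens"])             # 4096 -- context window size
    print(entry["input_cost_per_token"])   # 0.0  -- local Ollama models incur no cost
    print(entry["output_cost_per_token"])  # 0.0
    print(entry["litellm_provider"])       # "ollama"

Because both per-token costs are 0.0, any cost tracking built on top of this map should report a total of 0 for these locally hosted models, while max_tokens still lets callers validate prompt length against the 4096-token window.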