From d09486f93b2af506a928ecacc4aba102b33aa46a Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Thu, 2 Nov 2023 09:18:26 -0700
Subject: [PATCH] (feat) model context window.json add ollama/mistral,
 ollama/codellama, ollama/orca-mini, ollama/vicuna

---
 model_prices_and_context_window.json | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 520144f431..ec496071fe 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -579,6 +579,34 @@
         "litellm_provider": "ollama",
         "mode": "completion"
     },
+    "ollama/mistral": {
+        "max_tokens": 8192,
+        "input_cost_per_token": 0.0,
+        "output_cost_per_token": 0.0,
+        "litellm_provider": "ollama",
+        "mode": "completion"
+    },
+    "ollama/codellama": {
+        "max_tokens": 4096,
+        "input_cost_per_token": 0.0,
+        "output_cost_per_token": 0.0,
+        "litellm_provider": "ollama",
+        "mode": "completion"
+    },
+    "ollama/orca-mini": {
+        "max_tokens": 4096,
+        "input_cost_per_token": 0.0,
+        "output_cost_per_token": 0.0,
+        "litellm_provider": "ollama",
+        "mode": "completion"
+    },
+    "ollama/vicuna": {
+        "max_tokens": 2048,
+        "input_cost_per_token": 0.0,
+        "output_cost_per_token": 0.0,
+        "litellm_provider": "ollama",
+        "mode": "completion"
+    },
     "deepinfra/meta-llama/Llama-2-70b-chat-hf": {
         "max_tokens": 4096,
         "input_cost_per_token": 0.000000700,
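
Note (not part of the patch): a minimal sketch of how the new entries could be read, assuming the JSON file is loaded directly from the current directory rather than through any particular litellm helper.

# Sketch: load the map and look up the newly added Ollama entries.
import json

with open("model_prices_and_context_window.json") as f:
    model_map = json.load(f)

for model in ("ollama/mistral", "ollama/codellama", "ollama/orca-mini", "ollama/vicuna"):
    info = model_map[model]
    # Ollama models run locally, so both cost fields are 0.0;
    # max_tokens records the model's context window.
    print(model, info["max_tokens"], info["input_cost_per_token"], info["output_cost_per_token"])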