From e3cd8e169b4b996b3633b048064ea135a07ca024 Mon Sep 17 00:00:00 2001 From: Kyrylo Yefimenko Date: Thu, 15 Aug 2024 12:27:51 +0100 Subject: [PATCH] Use specific llama2 and llama3 model names in Ollama --- model_prices_and_context_window.json | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json index 415d220f2..0a3fd494c 100644 --- a/model_prices_and_context_window.json +++ b/model_prices_and_context_window.json @@ -3354,6 +3354,15 @@ "litellm_provider": "ollama", "mode": "completion" }, + "ollama/llama2:7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion" + }, "ollama/llama2:13b": { "max_tokens": 4096, "max_input_tokens": 4096, @@ -3390,6 +3399,15 @@ "litellm_provider": "ollama", "mode": "chat" }, + "ollama/llama3:8b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, "ollama/llama3:70b": { "max_tokens": 8192, "max_input_tokens": 8192,