From faa4dfe03ec42a8cf6e7b99ff98a9584491c994c Mon Sep 17 00:00:00 2001 From: sha-ahammed Date: Wed, 5 Jun 2024 16:48:38 +0530 Subject: [PATCH] feat: Add Ollama as a provider in the proxy UI --- litellm/llms/ollama.py | 15 ++++++++++++++- litellm/utils.py | 4 ++++ .../src/components/model_dashboard.tsx | 3 +++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/litellm/llms/ollama.py b/litellm/llms/ollama.py index 283878056..76687839d 100644 --- a/litellm/llms/ollama.py +++ b/litellm/llms/ollama.py @@ -2,8 +2,9 @@ from itertools import chain import requests, types, time # type: ignore import json, uuid import traceback -from typing import Optional +from typing import Optional, List import litellm +from litellm.types.utils import ProviderField import httpx, aiohttp, asyncio # type: ignore from .prompt_templates.factory import prompt_factory, custom_prompt @@ -124,6 +125,18 @@ class OllamaConfig: ) and v is not None } + + def get_required_params(self) -> List[ProviderField]: + """For a given provider, return its required fields with a description""" + return [ + ProviderField( + field_name="base_url", + field_type="string", + field_description="Your Ollama API Base", + field_value="http://localhost:11434", + ) + ] + def get_supported_openai_params( self, ): diff --git a/litellm/utils.py b/litellm/utils.py index a2a237123..6ae62b417 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -7344,6 +7344,10 @@ def get_provider_fields(custom_llm_provider: str) -> List[ProviderField]: if custom_llm_provider == "databricks": return litellm.DatabricksConfig().get_required_params() + + elif custom_llm_provider == "ollama": + return litellm.OllamaConfig().get_required_params() + else: return [] diff --git a/ui/litellm-dashboard/src/components/model_dashboard.tsx b/ui/litellm-dashboard/src/components/model_dashboard.tsx index adb45346b..73e5a7a8f 100644 --- a/ui/litellm-dashboard/src/components/model_dashboard.tsx +++ 
b/ui/litellm-dashboard/src/components/model_dashboard.tsx @@ -145,6 +145,7 @@ enum Providers { OpenAI_Compatible = "OpenAI-Compatible Endpoints (Groq, Together AI, Mistral AI, etc.)", Vertex_AI = "Vertex AI (Anthropic, Gemini, etc.)", Databricks = "Databricks", + Ollama = "Ollama", } const provider_map: Record<string, string> = { @@ -156,6 +157,7 @@ const provider_map: Record<string, string> = { OpenAI_Compatible: "openai", Vertex_AI: "vertex_ai", Databricks: "databricks", + Ollama: "ollama", }; const retry_policy_map: Record<string, string> = { @@ -1747,6 +1749,7 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({ )} {selectedProvider != Providers.Bedrock && selectedProvider != Providers.Vertex_AI && + selectedProvider != Providers.Ollama && (dynamicProviderForm === undefined || dynamicProviderForm.fields.length == 0) && (