Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00
feat: Add Ollama as a provider in the proxy UI
commit faa4dfe03e (parent 499933f943)
3 changed files with 21 additions and 1 deletion
@@ -7344,6 +7344,10 @@ def get_provider_fields(custom_llm_provider: str) -> List[ProviderField]:
     if custom_llm_provider == "databricks":
         return litellm.DatabricksConfig().get_required_params()
+
+    elif custom_llm_provider == "ollama":
+        return litellm.OllamaConfig().get_required_params()
+
     else:
         return []
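For context, the hunk above extends get_provider_fields so the proxy UI can look up the connection fields a given provider requires. Below is a minimal sketch of how that lookup could be exercised; the standalone wrapper function and the printing loop are illustrative assumptions, while the two Config calls are taken verbatim from the diff.

# Sketch only: mirrors the branch logic from the hunk above so it can run outside the proxy.
# Assumes the litellm package is installed; DatabricksConfig/OllamaConfig and their
# get_required_params() methods are exactly the calls referenced in the diff.
import litellm

def get_provider_fields(custom_llm_provider: str):
    if custom_llm_provider == "databricks":
        return litellm.DatabricksConfig().get_required_params()
    elif custom_llm_provider == "ollama":
        return litellm.OllamaConfig().get_required_params()
    else:
        return []

# Hypothetical usage: list the fields the UI would prompt for when adding Ollama.
for field in get_provider_fields("ollama"):
    print(field)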