Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
feat: Add Ollama as a provider in the proxy UI
commit faa4dfe03e
parent 499933f943
3 changed files with 21 additions and 1 deletion
litellm/llms/ollama.py
@@ -2,8 +2,9 @@ from itertools import chain
 import requests, types, time # type: ignore
 import json, uuid
 import traceback
-from typing import Optional
+from typing import Optional, List
 import litellm
+from litellm.types.utils import ProviderField
 import httpx, aiohttp, asyncio # type: ignore
 from .prompt_templates.factory import prompt_factory, custom_prompt
 
@@ -124,6 +125,18 @@ class OllamaConfig:
             )
             and v is not None
         }
+
+    def get_required_params(self) -> List[ProviderField]:
+        """For a given provider, return its required fields with a description"""
+        return [
+            ProviderField(
+                field_name="base_url",
+                field_type="string",
+                field_description="Your Ollama API Base",
+                field_value="http://10.10.11.249:11434",
+            )
+        ]
+
     def get_supported_openai_params(
         self,
     ):
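For context on how this hook is consumed: the proxy backend can ask a provider's config for its required fields and forward them to the UI so it knows which inputs to render when a user adds Ollama as a provider. Below is a minimal sketch of a CLI-style consumer, not litellm's actual proxy UI code; it assumes `ProviderField` is the `TypedDict` defined in `litellm.types.utils`, so entries are read with dict subscripting, and that `OllamaConfig` is importable from `litellm.llms.ollama` as in this commit.

```python
# Illustrative consumer of the new get_required_params() hook;
# this sketch is not litellm's actual proxy UI code.
from litellm.llms.ollama import OllamaConfig


def print_setup_prompts() -> None:
    # get_required_params() returns a list of ProviderField entries.
    # ProviderField is a TypedDict, so each entry is a plain dict.
    for field in OllamaConfig().get_required_params():
        print(
            f"{field['field_name']} ({field['field_type']}): "
            f"{field['field_description']} [default: {field['field_value']}]"
        )


if __name__ == "__main__":
    print_setup_prompts()
    # With the values committed above, this prints:
    # base_url (string): Your Ollama API Base [default: http://10.10.11.249:11434]
```

Because the method returns structured metadata rather than rendering anything itself, the same hook can back a web form, a CLI prompt, or validation logic without changes to the provider code.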