Mirror of https://github.com/BerriAI/litellm.git
fix(proxy_server): fix linting issues
parent c3616ebae6
commit 2a681e578c
2 changed files with 7 additions and 7 deletions
@@ -138,7 +138,7 @@ log_file = "api_log.json"
 worker_config = None
 master_key = None
 prisma_client = None
-config_cache = {}
+config_cache: dict = {}
 ### REDIS QUEUE ###
 async_result = None
 celery_app_conn = None
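The config_cache change above looks like a fix for a static-typing lint: an empty dict literal gives the checker nothing to infer key/value types from. A minimal before/after sketch, assuming mypy (or a similar checker) is the linter being satisfied:

    # Before: the checker cannot infer key/value types from an empty literal
    config_cache = {}  # mypy: Need type annotation for "config_cache"

    # After: an explicit annotation satisfies the check; runtime behaviour is unchanged
    config_cache: dict = {}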
@@ -487,7 +487,7 @@ def model_list():
     try:
         response = requests.get("http://0.0.0.0:11434/api/tags")
         models = response.json()["models"]
-        ollama_models = [m["name"].replace(":latest", "") for m in models]
+        ollama_models = ["ollama/" + m["name"].replace(":latest", "") for m in models]
         all_models.extend(ollama_models)
     except Exception as e:
         pass
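The second hunk touches the model_list() handler, which asks a local Ollama server for its installed models and folds them into the model list; after this change each name gets an "ollama/" prefix, presumably so the proxy routes those entries to the Ollama provider. A minimal standalone sketch of that logic, assuming an Ollama server on its default port and the requests package (list_local_ollama_models is a hypothetical helper name, not part of the repo):

    import requests

    def list_local_ollama_models():
        # Query the local Ollama server for installed models (default address assumed).
        response = requests.get("http://0.0.0.0:11434/api/tags")
        models = response.json()["models"]
        # Strip the ":latest" tag suffix and add the "ollama/" provider prefix.
        return ["ollama/" + m["name"].replace(":latest", "") for m in models]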