fix(ollama.py): fix get model info request

Fixes https://github.com/BerriAI/litellm/issues/6703
This commit is contained in:
Krrish Dholakia 2024-11-14 01:04:33 +05:30
parent 1c3dcd4b25
commit b6c9032454
2 changed files with 10 additions and 3 deletions

View file

@@ -185,6 +185,8 @@ class OllamaConfig:
"name": "mistral"
}'
"""
if model.startswith("ollama/") or model.startswith("ollama_chat/"):
model = model.split("/", 1)[1]
api_base = get_secret_str("OLLAMA_API_BASE") or "http://localhost:11434"
try:

View file

@@ -89,11 +89,16 @@ def test_get_model_info_ollama_chat():
"template": "tools",
}
),
):
) as mock_client:
info = OllamaConfig().get_model_info("mistral")
print("info", info)
assert info["supports_function_calling"] is True
info = get_model_info("ollama/mistral")
print("info", info)
assert info["supports_function_calling"] is True
mock_client.assert_called()
print(mock_client.call_args.kwargs)
assert mock_client.call_args.kwargs["json"]["name"] == "mistral"