Litellm ollama refactor (#7162)

* refactor(ollama/): refactor ollama `/api/generate` to use base llm config

Addresses https://github.com/andrewyng/aisuite/issues/113#issuecomment-2512369132

* test: skip unresponsive test

* test(test_secret_manager.py): mark flaky test

* test: fix google sm test

* fix: fix init.py
This commit is contained in:
Krish Dholakia 2024-12-10 21:45:35 -08:00 committed by GitHub
parent 6c6834dde7
commit e9fbefca5d
11 changed files with 322 additions and 235 deletions

View file

@@ -29,7 +29,7 @@ def get_supported_openai_params( # noqa: PLR0915
if custom_llm_provider == "bedrock":
return litellm.AmazonConverseConfig().get_supported_openai_params(model=model)
elif custom_llm_provider == "ollama":
-        return litellm.OllamaConfig().get_supported_openai_params()
+        return litellm.OllamaConfig().get_supported_openai_params(model=model)
elif custom_llm_provider == "ollama_chat":
return litellm.OllamaChatConfig().get_supported_openai_params()
elif custom_llm_provider == "anthropic":