Litellm ollama refactor (#7162)

* refactor(ollama/): refactor ollama `/api/generate` to use base llm config

Addresses https://github.com/andrewyng/aisuite/issues/113#issuecomment-2512369132

* test: skip unresponsive test

* test(test_secret_manager.py): mark flaky test

* test: fix google sm test

* fix: fix init.py
Krish Dholakia 2024-12-10 21:45:35 -08:00 committed by GitHub
parent 6c6834dde7
commit e9fbefca5d
11 changed files with 322 additions and 235 deletions
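
The core of the change is moving the Ollama `/api/generate` integration onto a shared base LLM config. As a rough illustration of that pattern only (the class and method names below are hypothetical and may differ from litellm's actual interface), a provider config centralizes request transformation and endpoint resolution behind a common contract:

    from abc import ABC, abstractmethod
    from typing import Any, Dict, List


    class BaseLLMConfig(ABC):
        """Shared contract each provider config implements (illustrative, not litellm's API)."""

        @abstractmethod
        def transform_request(
            self,
            model: str,
            messages: List[Dict[str, str]],
            optional_params: Dict[str, Any],
        ) -> Dict[str, Any]:
            """Map OpenAI-style inputs to the provider's request payload."""

        @abstractmethod
        def get_complete_url(self, api_base: str, model: str) -> str:
            """Return the provider endpoint for this request."""


    class OllamaGenerateConfig(BaseLLMConfig):
        """Config targeting Ollama's /api/generate endpoint."""

        def transform_request(self, model, messages, optional_params):
            # /api/generate takes a single prompt string, so flatten the chat messages.
            prompt = "\n".join(m.get("content", "") for m in messages)
            return {
                "model": model,
                "prompt": prompt,
                "options": optional_params,
                "stream": False,
            }

        def get_complete_url(self, api_base, model):
            return f"{api_base.rstrip('/')}/api/generate"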


@@ -80,7 +80,7 @@ mock_ollama_embedding_response = EmbeddingResponse(model="ollama/nomic-embed-tex
 @mock.patch(
-    "litellm.llms.ollama.ollama_embeddings",
+    "litellm.llms.ollama.completion.handler.ollama_embeddings",
     return_value=mock_ollama_embedding_response,
 )
 def test_ollama_embeddings(mock_embeddings):
@@ -107,7 +107,7 @@ def test_ollama_embeddings(mock_embeddings):
 @mock.patch(
-    "litellm.llms.ollama.ollama_aembeddings",
+    "litellm.llms.ollama.completion.handler.ollama_aembeddings",
     return_value=mock_ollama_embedding_response,
 )
 def test_ollama_aembeddings(mock_aembeddings):
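
The diff itself only retargets the mocks: after the refactor the embedding handlers are looked up from litellm.llms.ollama.completion.handler, so the tests must patch them at that import path rather than at the old litellm.llms.ollama location. A minimal standalone version of the same patch-where-it-is-looked-up pattern (assuming litellm is installed; the model name and assertion are illustrative, not part of the diff) is:

    from unittest import mock

    import litellm
    from litellm import EmbeddingResponse

    # Canned response returned by the patched handler (field values are illustrative).
    mock_ollama_embedding_response = EmbeddingResponse(model="ollama/nomic-embed-text")


    @mock.patch(
        # Patch the name where it is looked up after the refactor, i.e. the new
        # completion.handler module, not where it used to live.
        "litellm.llms.ollama.completion.handler.ollama_embeddings",
        return_value=mock_ollama_embedding_response,
    )
    def test_ollama_embeddings(mock_embeddings):
        litellm.embedding(
            model="ollama/nomic-embed-text",
            input=["hello world"],
        )
        # The call should have been routed through the relocated handler.
        mock_embeddings.assert_called_once()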