Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
Litellm dev 03 08 2025 p3 (#9089)
* feat(ollama_chat.py): pass the http client down to ollama_chat, enabling easier testing
* fix(factory.py): fix passing images to ollama's `/api/generate` endpoint (fixes https://github.com/BerriAI/litellm/issues/6683)
* fix(factory.py): fix the ollama prompt template (`ollama_pt`) so templating is handled correctly
This commit is contained in:
parent 93273723cd
commit e00d4fb18c
5 changed files with 165 additions and 52 deletions
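A short, hedged sketch of what the new test below checks: after the factory.py fix, the JSON body that litellm posts to Ollama's `/api/generate` endpoint carries the templated prompt and the extracted images. Only the fields asserted in the test ("model", "prompt", "images") are taken from this diff; the concrete values shown (the prompt text past the "### User:\n" prefix, the base64 image placeholder) are illustrative assumptions, not guaranteed output of the library.

# Approximate shape of the /api/generate request body the new test asserts on.
# "model", "prompt", and "images" come from the test's assertions; the concrete
# values are illustrative assumptions, not exact output of the library.
expected_request_body = {
    "model": "llama3.2-vision:11b",        # "ollama/" prefix stripped from the model name
    "prompt": "### User:\n...",            # templated prompt; the test only asserts this prefix
    "images": ["<base64-encoded image>"],  # images pulled out of image_url content parts
}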
@@ -1,4 +1,5 @@
import asyncio
import json
import os
import sys
import traceback

@@ -76,6 +77,45 @@ def test_ollama_json_mode():
# test_ollama_json_mode()


def test_ollama_vision_model():
    from litellm.llms.custom_httpx.http_handler import HTTPHandler

    client = HTTPHandler()
    from unittest.mock import patch

    with patch.object(client, "post") as mock_post:
        try:
            litellm.completion(
                model="ollama/llama3.2-vision:11b",
                messages=[
                    {
                        "role": "user",
                        "content": [
                            {"type": "text", "text": "Whats in this image?"},
                            {
                                "type": "image_url",
                                "image_url": {
                                    "url": "https://dummyimage.com/100/100/fff&text=Test+image"
                                },
                            },
                        ],
                    }
                ],
                client=client,
            )
        except Exception as e:
            print(e)
        mock_post.assert_called()

        print(mock_post.call_args.kwargs)

        json_data = json.loads(mock_post.call_args.kwargs["data"])
        assert json_data["model"] == "llama3.2-vision:11b"
        assert "images" in json_data
        assert "prompt" in json_data
        assert json_data["prompt"].startswith("### User:\n")


mock_ollama_embedding_response = EmbeddingResponse(model="ollama/nomic-embed-text")
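Outside of the test, the same client pass-through that this commit enables can be used to reuse one HTTP client across Ollama calls. A minimal usage sketch, assuming a local Ollama server is running and has the llama3.2-vision:11b model pulled:

# Minimal sketch of the client pass-through added in this commit.
# Assumption: a local Ollama server is reachable and llama3.2-vision:11b is pulled.
import litellm
from litellm.llms.custom_httpx.http_handler import HTTPHandler

client = HTTPHandler()  # reusable synchronous HTTP client

response = litellm.completion(
    model="ollama/llama3.2-vision:11b",
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "What's in this image?"},
                {
                    "type": "image_url",
                    "image_url": {"url": "https://dummyimage.com/100/100/fff&text=Test+image"},
                },
            ],
        }
    ],
    client=client,  # handler is passed down to the Ollama call, as in the test above
)
print(response.choices[0].message.content)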