mirror of https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
build: merge branch

This commit is contained in: parent f63cf00306, commit 4418e6dd14
9 changed files with 301 additions and 56 deletions
@@ -254,7 +254,6 @@ class BaseLLMChatTest(ABC):
         # relevant issue: https://github.com/BerriAI/litellm/issues/6741
         assert response.choices[0].message.content is not None
-
 
     @pytest.mark.parametrize(
         "response_format",
         [
@@ -337,7 +336,6 @@ class BaseLLMChatTest(ABC):
 
         print(f"translated_params={translated_params}")
-
 
     @pytest.mark.flaky(retries=6, delay=1)
     def test_json_response_pydantic_obj(self):
         litellm.set_verbose = True
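For context, test_json_response_pydantic_obj exercises litellm's structured-output path, in which a Pydantic model class is passed as response_format and translated into the provider's JSON-schema parameters. A minimal sketch of that call pattern, assuming a model that supports JSON-schema responses and valid credentials; the model name and the CalendarEvent schema are illustrative assumptions, not taken from this diff:

    # Sketch only: model name and schema are assumptions for illustration.
    import litellm
    from pydantic import BaseModel

    class CalendarEvent(BaseModel):
        name: str
        date: str
        participants: list[str]

    response = litellm.completion(
        model="gpt-4o-mini",  # assumed JSON-schema-capable model
        messages=[{"role": "user", "content": "Alice and Bob meet on Friday."}],
        response_format=CalendarEvent,  # pydantic class, translated per provider
    )
    print(response.choices[0].message.content)  # JSON string matching CalendarEvent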
@@ -613,6 +611,46 @@ class BaseLLMChatTest(ABC):
 
         assert response is not None
 
+    def test_image_url_string(self):
+        litellm.set_verbose = True
+        from litellm.utils import supports_vision
+
+        os.environ["LITELLM_LOCAL_MODEL_COST_MAP"] = "True"
+        litellm.model_cost = litellm.get_model_cost_map(url="")
+
+        image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"
+
+        base_completion_call_args = self.get_base_completion_call_args()
+        if not supports_vision(base_completion_call_args["model"], None):
+            pytest.skip("Model does not support image input")
+        elif "http://" in image_url and "fireworks_ai" in base_completion_call_args.get(
+            "model"
+        ):
+            pytest.skip("Model does not support http:// input")
+
+        image_url_param = image_url
+        messages = [
+            {
+                "role": "user",
+                "content": [
+                    {"type": "text", "text": "What's in this image?"},
+                    {
+                        "type": "image_url",
+                        "image_url": image_url_param,
+                    },
+                ],
+            }
+        ]
+
+        try:
+            response = self.completion_function(
+                **base_completion_call_args, messages=messages
+            )
+        except litellm.InternalServerError:
+            pytest.skip("Model is overloaded")
+
+        assert response is not None
+
     @pytest.mark.flaky(retries=4, delay=1)
     def test_prompt_caching(self):
         litellm.set_verbose = True
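The new test_image_url_string covers passing the image URL as a bare string under the "image_url" key, rather than the usual {"url": "..."} dict. Outside the test harness, the same request shape looks like the sketch below; the message payload comes straight from the test, while the model name is an assumption for illustration:

    # Sketch of the message shape the new test exercises: "image_url"
    # given as a plain string instead of {"url": "..."}. The model name
    # is an assumption, not part of this diff.
    import litellm

    image_url = (
        "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/"
        "Gfp-wisconsin-madison-the-nature-boardwalk.jpg/"
        "2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"
    )

    response = litellm.completion(
        model="gpt-4o-mini",  # assumed vision-capable model
        messages=[
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": "What's in this image?"},
                    {"type": "image_url", "image_url": image_url},  # string, not dict
                ],
            }
        ],
    )
    print(response.choices[0].message.content)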