Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 02:34:29 +00:00)

Commit 10f76ec36c: test: testing fixes
Parent: 520cd7fa89
4 changed files with 58 additions and 39 deletions
@@ -750,7 +750,7 @@ def test_completion_ollama_hosted():
            messages=messages,
            max_tokens=10,
            num_retries=3,
            timeout=90,
            timeout=20,
            api_base="https://test-ollama-endpoint.onrender.com",
        )
        # Add any assertions here to check the response
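For reference, a self-contained sketch of the hosted-Ollama completion call this hunk edits, using litellm's standard completion() API. The model id, the message text, and the exception handling are illustrative assumptions; only the keyword arguments are the ones visible in the hunk above.

    import litellm
    from litellm import completion

    def test_completion_ollama_hosted():
        # Hypothetical reconstruction: only the kwargs below appear in the diff;
        # the model id and message are placeholders.
        messages = [{"role": "user", "content": "Hello, how are you?"}]
        try:
            response = completion(
                model="ollama/phi",  # assumed model id
                messages=messages,
                max_tokens=10,
                num_retries=3,  # retry transient failures against the hosted endpoint
                timeout=20,  # this commit switches the timeout between 90 and 20 seconds
                api_base="https://test-ollama-endpoint.onrender.com",
            )
            # Add any assertions here to check the response
            print(response)
            assert response.choices[0].message.content is not None
        except litellm.Timeout:
            pass  # the free hosted endpoint can be slow to respond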
@@ -183,7 +183,7 @@ def test_azure_completion_stream():
    # checks if the model response available in the async + stream callbacks is equal to the received response
    customHandler2 = MyCustomHandler()
    litellm.callbacks = [customHandler2]
    litellm.set_verbose = True
    litellm.set_verbose = False
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {
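The MyCustomHandler referenced in this hunk is defined elsewhere in the test file. As a rough stand-in, and assuming it follows litellm's documented CustomLogger callback interface, a minimal handler that captures the final response for the comparison described in the comment could look like this; the class body and attribute name are illustrative, not taken from the diff.

    import litellm
    from litellm.integrations.custom_logger import CustomLogger

    class MyCustomHandler(CustomLogger):
        # Illustrative stand-in: remember the response object the success
        # callback receives so the test can compare it to the streamed output.
        def __init__(self):
            super().__init__()
            self.callback_response = None

        def log_success_event(self, kwargs, response_obj, start_time, end_time):
            self.callback_response = response_obj

    customHandler2 = MyCustomHandler()
    litellm.callbacks = [customHandler2]
    litellm.set_verbose = False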
@@ -19,32 +19,39 @@ import litellm


def test_image_generation_openai():
    try:
        litellm.set_verbose = True
        response = litellm.image_generation(
            prompt="A cute baby sea otter", model="dall-e-3", num_retries=3
            prompt="A cute baby sea otter", model="dall-e-3"
        )
        print(f"response: {response}")
        assert len(response.data) > 0
    except litellm.RateLimitError as e:
        pass
    except Exception as e:
        pytest.fail(f"An exception occurred - {str(e)}")


# test_image_generation_openai()


def test_image_generation_azure():
    try:
        response = litellm.image_generation(
            prompt="A cute baby sea otter",
            model="azure/",
            api_version="2023-06-01-preview",
            num_retries=3,
            prompt="A cute baby sea otter", model="azure/", api_version="2023-06-01-preview"
        )
        print(f"response: {response}")
        assert len(response.data) > 0

    except litellm.RateLimitError as e:
        pass
    except Exception as e:
        pytest.fail(f"An exception occurred - {str(e)}")

# test_image_generation_azure()


def test_image_generation_azure_dall_e_3():
    try:
        litellm.set_verbose = True
        response = litellm.image_generation(
            prompt="A cute baby sea otter",
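Taken out of the diff context, the sync OpenAI test in this hunk is nearly self-contained already. A runnable sketch, assuming an OPENAI_API_KEY in the environment and showing the num_retries=3 variant of the call (the diff contains both variants):

    import pytest
    import litellm

    def test_image_generation_openai():
        # Same pattern as the hunk above: one dall-e-3 request, tolerate
        # rate limits, fail the test on any other exception.
        try:
            litellm.set_verbose = True
            response = litellm.image_generation(
                prompt="A cute baby sea otter", model="dall-e-3", num_retries=3
            )
            print(f"response: {response}")
            assert len(response.data) > 0
        except litellm.RateLimitError:
            pass
        except Exception as e:
            pytest.fail(f"An exception occurred - {str(e)}")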
@@ -52,28 +59,40 @@ def test_image_generation_azure_dall_e_3():
            api_version="2023-12-01-preview",
            api_base=os.getenv("AZURE_SWEDEN_API_BASE"),
            api_key=os.getenv("AZURE_SWEDEN_API_KEY"),
            num_retries=3,
        )
        print(f"response: {response}")
        assert len(response.data) > 0
    except litellm.RateLimitError as e:
        pass
    except Exception as e:
        pytest.fail(f"An exception occurred - {str(e)}")


# test_image_generation_azure_dall_e_3()
@pytest.mark.asyncio
async def test_async_image_generation_openai():
    try:
        response = litellm.image_generation(
            prompt="A cute baby sea otter", model="dall-e-3", num_retries=3
            prompt="A cute baby sea otter", model="dall-e-3"
        )
        print(f"response: {response}")
        assert len(response.data) > 0

    except litellm.RateLimitError as e:
        pass
    except Exception as e:
        pytest.fail(f"An exception occurred - {str(e)}")

# asyncio.run(test_async_image_generation_openai())


@pytest.mark.asyncio
async def test_async_image_generation_azure():
    try:
        response = await litellm.aimage_generation(
            prompt="A cute baby sea otter", model="azure/dall-e-3-test", num_retries=3
            prompt="A cute baby sea otter", model="azure/dall-e-3-test"
        )
        print(f"response: {response}")
    except litellm.RateLimitError as e:
        pass
    except Exception as e:
        pytest.fail(f"An exception occurred - {str(e)}")
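The async variants follow the same pattern through litellm.aimage_generation. A runnable sketch of the Azure async test, assuming the azure/dall-e-3-test deployment named in the diff is configured through the usual AZURE_API_KEY / AZURE_API_BASE / AZURE_API_VERSION environment variables and that pytest-asyncio is installed (the @pytest.mark.asyncio marker in the hunk implies it):

    import asyncio
    import pytest
    import litellm

    @pytest.mark.asyncio
    async def test_async_image_generation_azure():
        try:
            response = await litellm.aimage_generation(
                prompt="A cute baby sea otter", model="azure/dall-e-3-test"
            )
            print(f"response: {response}")
        except litellm.RateLimitError:
            pass
        except Exception as e:
            pytest.fail(f"An exception occurred - {str(e)}")

    # Outside pytest, the coroutine can be driven directly:
    # asyncio.run(test_async_image_generation_azure())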
@@ -307,7 +307,7 @@ def test_completion_ollama_hosted_stream():
            messages=messages,
            max_tokens=10,
            num_retries=3,
            timeout=90,
            timeout=20,
            api_base="https://test-ollama-endpoint.onrender.com",
            stream=True,
        )
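Because this last hunk sets stream=True, the surrounding test consumes an iterator of chunks rather than a single response object. A hedged sketch of that consumption, with the keyword arguments taken from the hunk and the model id and message text assumed; litellm streaming chunks follow the OpenAI delta format:

    import litellm
    from litellm import completion

    def test_completion_ollama_hosted_stream():
        # Illustrative reconstruction; only the kwargs mirror the diff.
        messages = [{"role": "user", "content": "Hello, how are you?"}]
        response = completion(
            model="ollama/phi",  # assumed model id
            messages=messages,
            max_tokens=10,
            num_retries=3,
            timeout=20,  # this commit switches the timeout between 90 and 20 seconds
            api_base="https://test-ollama-endpoint.onrender.com",
            stream=True,
        )
        collected = ""
        for chunk in response:
            # Each chunk carries an OpenAI-style delta; content may be None.
            delta = chunk.choices[0].delta.content
            if delta is not None:
                collected += delta
        print(f"streamed text: {collected}")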