mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-24 18:24:20 +00:00
test: remove deprecated Azure AI Mistral invalid-params tests
This commit is contained in:
parent
bd88263b29
commit
b3f37b860d
2 changed files with 0 additions and 72 deletions
|
@ -132,36 +132,6 @@ def test_null_role_response():
|
|||
assert response.choices[0].message.role == "assistant"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("sync_mode", [True, False])
@pytest.mark.asyncio
async def test_completion_azure_ai_mistral_invalid_params(sync_mode):
    """Check that Azure AI Mistral drops unsupported params instead of erroring.

    `frequency_penalty` / `presence_penalty` are not supported by the Azure AI
    Mistral endpoint; with `drop_params=True` the call should still succeed.
    Runs once synchronously and once asynchronously via the parametrize mark.
    """
    try:
        import os

        litellm.set_verbose = True

        # Point the generic Azure AI env vars at the Mistral deployment.
        os.environ["AZURE_AI_API_BASE"] = os.getenv("AZURE_MISTRAL_API_BASE", "")
        os.environ["AZURE_AI_API_KEY"] = os.getenv("AZURE_MISTRAL_API_KEY", "")

        data = {
            "model": "azure_ai/mistral",
            "messages": [{"role": "user", "content": "What is the meaning of life?"}],
            "frequency_penalty": 0.1,
            "presence_penalty": 0.1,
            "drop_params": True,  # unsupported params must be silently dropped
        }
        if sync_mode:
            response: litellm.ModelResponse = completion(**data)  # type: ignore
        else:
            response: litellm.ModelResponse = await litellm.acompletion(**data)  # type: ignore

        assert "azure_ai" in response.model
    except litellm.Timeout:
        # Timeouts are environmental, not a param-handling failure — tolerate them.
        pass
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
def test_completion_azure_command_r():
|
||||
try:
|
||||
litellm.set_verbose = True
|
||||
|
|
|
@ -2948,48 +2948,6 @@ def test_azure_streaming_and_function_calling():
|
|||
pytest.fail(f"Error occurred: {e}")
|
||||
raise e
|
||||
|
||||
|
||||
@pytest.mark.parametrize("sync_mode", [True, False])
@pytest.mark.asyncio
async def test_completion_azure_ai_mistral_invalid_params(sync_mode):
    """Streaming variant: unsupported params are dropped and chunks rebuild a response.

    Streams a completion from the Azure AI Mistral endpoint with
    `drop_params=True`, collects every chunk, and verifies that
    `stream_chunk_builder` reassembles them into a response with content.
    Runs once synchronously and once asynchronously via the parametrize mark.
    """
    try:
        import os
        from litellm import stream_chunk_builder

        litellm.set_verbose = True

        # Point the generic Azure AI env vars at the Mistral deployment.
        os.environ["AZURE_AI_API_BASE"] = os.getenv("AZURE_MISTRAL_API_BASE", "")
        os.environ["AZURE_AI_API_KEY"] = os.getenv("AZURE_MISTRAL_API_KEY", "")

        data = {
            "model": "azure_ai/mistral",
            "messages": [{"role": "user", "content": "What is the meaning of life?"}],
            "frequency_penalty": 0.1,
            "presence_penalty": 0.1,
            "drop_params": True,  # unsupported params must be silently dropped
            "stream": True,
        }
        chunks = []
        if sync_mode:
            response = completion(**data)  # type: ignore
            for chunk in response:
                print(chunk)
                chunks.append(chunk)
        else:
            response = await litellm.acompletion(**data)  # type: ignore

            async for chunk in response:
                print(chunk)
                chunks.append(chunk)
        print(f"chunks: {chunks}")
        # Reassemble the streamed deltas into a full ModelResponse.
        response = stream_chunk_builder(chunks=chunks)
        assert response.choices[0].message.content is not None
    except litellm.Timeout:
        # Timeouts are environmental, not a param-handling failure — tolerate them.
        pass
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_azure_astreaming_and_function_calling():
|
||||
import uuid
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue