Compare commits


5 commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Ishaan Jaff | c94f9f3b1e | test_audio_speech_router | 2024-11-28 20:21:26 -08:00 |
| Ishaan Jaff | e2787eeefe | Merge branch 'main' into litellm_fix_router_aspeech | 2024-11-28 20:18:52 -08:00 |
| Ishaan Jaff | edef33abb2 | test_audio_speech_router | 2024-11-28 20:17:37 -08:00 |
| Ishaan Jaff | 97cd5526ae | fix aspeech on router | 2024-11-28 20:17:19 -08:00 |
| Ishaan Jaff | 7901eee0b7 | doc Migrating Databases | 2024-11-28 07:33:43 -08:00 |
3 changed files with 74 additions and 3 deletions


@@ -69,3 +69,24 @@ When disabling spend logs (`disable_spend_logs: True`):
When disabling error logs (`disable_error_logs: True`):
- You **will not** be able to view Errors on the LiteLLM UI
- You **will** continue seeing error logs in your application logs and any other logging integrations you are using
## Migrating Databases
If you need to migrate databases, the following tables should be copied to ensure continuity of service and no downtime (a sketch of copying them follows the table):
| Table Name | Description |
|------------|-------------|
| LiteLLM_VerificationToken | **Required** to ensure existing virtual keys continue working |
| LiteLLM_UserTable | **Required** to ensure existing virtual keys continue working |
| LiteLLM_TeamTable | **Required** to ensure Teams are migrated |
| LiteLLM_TeamMembership | **Required** to ensure Teams member budgets are migrated |
| LiteLLM_BudgetTable | **Required** to migrate existing budgeting settings |
| LiteLLM_OrganizationTable | **Optional** Only migrate if you use Organizations in DB |
| LiteLLM_OrganizationMembership | **Optional** Only migrate if you use Organizations in DB |
| LiteLLM_ProxyModelTable | **Optional** Only migrate if you store your LLMs in the DB (i.e you set `STORE_MODEL_IN_DB=True`) |
| LiteLLM_SpendLogs | **Optional** Only migrate if you want historical data on LiteLLM UI |
| LiteLLM_ErrorLogs | **Optional** Only migrate if you want historical data on LiteLLM UI |
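
For teams doing the copy by hand, the snippet below is a minimal sketch, assuming a Postgres-backed proxy on both sides: it shells out to `pg_dump`/`pg_restore` for just the tables listed above. The `OLD_DATABASE_URL`/`NEW_DATABASE_URL` environment variables, the dump filename, and the decision to include every optional table are placeholders to adapt; it is not part of this change.

```python
# Minimal sketch (not part of this change): copy the tables listed above from
# the old LiteLLM Postgres database to the new one with pg_dump/pg_restore.
# OLD_DATABASE_URL / NEW_DATABASE_URL and the dump filename are placeholders.
import os
import subprocess

REQUIRED_TABLES = [
    "LiteLLM_VerificationToken",
    "LiteLLM_UserTable",
    "LiteLLM_TeamTable",
    "LiteLLM_TeamMembership",
    "LiteLLM_BudgetTable",
]
# Include these only if the corresponding features/data are in use (see table above).
OPTIONAL_TABLES = [
    "LiteLLM_OrganizationTable",
    "LiteLLM_OrganizationMembership",
    "LiteLLM_ProxyModelTable",
    "LiteLLM_SpendLogs",
    "LiteLLM_ErrorLogs",
]

old_db = os.environ["OLD_DATABASE_URL"]
new_db = os.environ["NEW_DATABASE_URL"]

# Dump only the selected tables from the old database. Quoting each table name
# preserves its mixed-case spelling in the pg_dump pattern match.
dump_cmd = ["pg_dump", "--format=custom", "--file=litellm_tables.dump", old_db]
for table in REQUIRED_TABLES + OPTIONAL_TABLES:
    dump_cmd += ["--table", f'"{table}"']
subprocess.run(dump_cmd, check=True)

# Restore into the new database. If the new database already has the schema
# (e.g. created by the proxy's migrations), consider adding --data-only.
subprocess.run(
    ["pg_restore", "--no-owner", "--dbname", new_db, "litellm_tables.dump"],
    check=True,
)
```

A plain SQL `COPY` per table, or your managed database's snapshot tooling, works just as well; the table list above is the important part.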


@@ -1689,11 +1689,15 @@ class Router:
                 and potential_model_client is not None
                 and dynamic_api_key != potential_model_client.api_key
             ):
-                pass
+                model_client = None
             else:
-                pass
+                model_client = potential_model_client
-            response = await litellm.aspeech(**data, **kwargs)
+            response = await litellm.aspeech(
+                **data,
+                client=model_client,
+                **kwargs,
+            )
             return response
         except Exception as e:
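
For context on what the fix above enables, here is a minimal usage sketch of the affected call path, not taken from this PR: the `azure/azure-tts` deployment name, the `AZURE_API_BASE`/`AZURE_API_KEY` environment variables, and the output handling are placeholders. With the change, requests reuse the async client built at `Router` init (forwarded as `client=model_client`) instead of leaving `litellm.aspeech` to build its own.

```python
# Minimal usage sketch (placeholders, not from this PR) of the call path
# patched above: Router.aspeech now forwards the cached async client to
# litellm.aspeech via `client=model_client`.
import asyncio
import os

from litellm import Router

router = Router(
    model_list=[
        {
            "model_name": "tts",
            "litellm_params": {
                # assumed Azure TTS deployment; adjust to your own deployment
                "model": "azure/azure-tts",
                "api_base": os.getenv("AZURE_API_BASE"),
                "api_key": os.getenv("AZURE_API_KEY"),
            },
        },
    ]
)


async def main() -> None:
    response = await router.aspeech(
        model="tts",
        voice="alloy",
        input="the quick brown fox jumped over the lazy dogs",
    )
    # The response wraps the generated audio; with an OpenAI-style binary
    # response object it can usually be written out via
    # response.stream_to_file("speech.mp3") (left commented as an assumption).


asyncio.run(main())
```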


@@ -20,6 +20,7 @@ sys.path.insert(
) # Adds the parent directory to the system path
import litellm
from litellm import APIConnectionError, Router
from unittest.mock import ANY
async def test_router_init():
@@ -213,3 +214,48 @@ def test_router_init_azure_service_principal_with_secret_with_environment_variab
# asyncio.run(test_router_init())

@pytest.mark.asyncio
async def test_audio_speech_router():
    """
    Test that router uses OpenAI/Azure OpenAI Client initialized during init for litellm.aspeech
    """
    from litellm import Router

    litellm.set_verbose = True

    model_list = [
        {
            "model_name": "tts",
            "litellm_params": {
                "model": "azure/azure-tts",
                "api_base": os.getenv("AZURE_SWEDEN_API_BASE"),
                "api_key": os.getenv("AZURE_SWEDEN_API_KEY"),
            },
        },
    ]

    _router = Router(model_list=model_list)

    expected_openai_client = _router._get_client(
        deployment=_router.model_list[0],
        kwargs={},
        client_type="async",
    )

    with patch("litellm.aspeech") as mock_aspeech:
        await _router.aspeech(
            model="tts",
            voice="alloy",
            input="the quick brown fox jumped over the lazy dogs",
        )

        print(
            "litellm.aspeech was called with kwargs = ", mock_aspeech.call_args.kwargs
        )

        # Get the actual client that was passed
        client_passed_in_request = mock_aspeech.call_args.kwargs["client"]
        assert client_passed_in_request == expected_openai_client