fix(router.py): support cloudflare ai gateway for azure models on router

Krrish Dholakia 2023-11-30 14:08:52 -08:00
parent 936c27c9ee
commit 032f71adb2
3 changed files with 50 additions and 32 deletions

@@ -65,7 +65,7 @@ def test_async_response_azure():
         user_message = "What do you know?"
         messages = [{"content": user_message, "role": "user"}]
         try:
-            response = await acompletion(model="azure/chatgpt-v-2", messages=messages, timeout=5)
+            response = await acompletion(model="azure/gpt-turbo", messages=messages, base_url=os.getenv("CLOUDFLARE_AZURE_BASE_URL"), api_key=os.getenv("AZURE_FRANCE_API_KEY"))
             print(f"response: {response}")
         except litellm.Timeout as e:
             pass
@@ -76,6 +76,7 @@ def test_async_response_azure():
 # test_async_response_azure()
 def test_async_anyscale_response():
     import asyncio
+    litellm.set_verbose = True
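
For context, a minimal sketch (not part of this commit's diff) of how the router.py change could be exercised: an azure/ model is routed through a Cloudflare AI Gateway by pointing api_base at the gateway URL in the Router's model_list. The env var names are reused from the test above; the model_list layout follows litellm's usual Router conventions and is an assumption here, not taken from this diff.

import os
from litellm import Router

# Assumption: standard litellm Router model_list format; the only part
# specific to this change is api_base pointing at the Cloudflare AI Gateway
# URL that fronts the Azure OpenAI deployment.
model_list = [
    {
        "model_name": "azure-gpt-turbo",
        "litellm_params": {
            "model": "azure/gpt-turbo",
            "api_base": os.getenv("CLOUDFLARE_AZURE_BASE_URL"),
            "api_key": os.getenv("AZURE_FRANCE_API_KEY"),
        },
    }
]

router = Router(model_list=model_list)

response = router.completion(
    model="azure-gpt-turbo",
    messages=[{"content": "What do you know?", "role": "user"}],
)
print(response)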