forked from phoenix/litellm-mirror
(test) pass client to Azure completion
parent 400a268934
commit 01c38d37fa

1 changed file with 5 additions and 5 deletions
@@ -591,20 +591,20 @@ async def test_re_use_azure_async_client():
         litellm.set_verbose=True
         import openai
         client = openai.AsyncAzureOpenAI(
-            azure_endpoint="https://openai-france-1234.openai.azure.com",
-            api_key=os.environ["AZURE_FRANCE_API_KEY"],
+            azure_endpoint=os.environ['AZURE_API_BASE'],
+            api_key=os.environ["AZURE_API_KEY"],
             api_version="2023-07-01-preview",
         )
         ## Test azure call
         for _ in range(3):
             response = await litellm.acompletion(
-                model="azure/gpt-turbo",
+                model="azure/chatgpt-v-2",
                 messages=messages,
-                azure_client=client
+                client=client
             )
             print(f"response: {response}")
     except Exception as e:
-        print("got Exception", e)
+        pytest.fail("got Exception", e)

 import asyncio
 asyncio.run(
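For reference, a minimal sketch of how the test reads after this commit. It assumes the surrounding test file supplies `messages` and the usual imports (`os`, `pytest`, `litellm`, `openai`); the `messages` placeholder below is illustrative only and is not the file's actual definition.

# Sketch of the updated test, assuming context from the surrounding test file.
import os
import openai
import pytest
import litellm

# Placeholder; the real `messages` is defined elsewhere in the test file.
messages = [{"role": "user", "content": "Hey, how's it going?"}]

@pytest.mark.asyncio
async def test_re_use_azure_async_client():
    try:
        litellm.set_verbose = True
        # Build one AsyncAzureOpenAI client and re-use it across several calls.
        client = openai.AsyncAzureOpenAI(
            azure_endpoint=os.environ["AZURE_API_BASE"],
            api_key=os.environ["AZURE_API_KEY"],
            api_version="2023-07-01-preview",
        )
        ## Test azure call
        for _ in range(3):
            response = await litellm.acompletion(
                model="azure/chatgpt-v-2",
                messages=messages,
                client=client,  # pass the pre-built client (was azure_client=)
            )
            print(f"response: {response}")
    except Exception as e:
        # As in the diff; note the second positional argument lands in
        # pytest.fail's `pytrace` parameter rather than in the message.
        pytest.fail("got Exception", e)

The substantive change is that `acompletion` now receives the pre-built client via `client=` rather than `azure_client=`, and the test fails loudly with `pytest.fail` instead of only printing the exception.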