(test) router - set sync stream client
This commit is contained in:
parent a4cf4e7ca9
commit 11a8713a50

1 changed file with 30 additions and 1 deletion
@@ -708,6 +708,35 @@ def test_reading_keys_os_environ():
 async_client: openai.AsyncAzureOpenAI = model["async_client"]
 assert async_client.api_key == os.environ["AZURE_API_KEY"]
 assert async_client.base_url == os.environ["AZURE_API_BASE"]
+assert async_client.max_retries == (os.environ["AZURE_MAX_RETRIES"]), f"{async_client.max_retries} vs {os.environ['AZURE_MAX_RETRIES']}"
+assert async_client.timeout == (os.environ["AZURE_TIMEOUT"]), f"{async_client.timeout} vs {os.environ['AZURE_TIMEOUT']}"
+print("async client set correctly!")
+
+print("\n Testing async streaming client")
+
+stream_async_client: openai.AsyncAzureOpenAI = model["stream_async_client"]
+assert stream_async_client.api_key == os.environ["AZURE_API_KEY"]
+assert stream_async_client.base_url == os.environ["AZURE_API_BASE"]
+assert stream_async_client.max_retries == (os.environ["AZURE_MAX_RETRIES"]), f"{stream_async_client.max_retries} vs {os.environ['AZURE_MAX_RETRIES']}"
+assert stream_async_client.timeout == (os.environ["AZURE_STREAM_TIMEOUT"]), f"{stream_async_client.timeout} vs {os.environ['AZURE_TIMEOUT']}"
+print("async stream client set correctly!")
+
+print("\n Testing sync client")
+client: openai.AzureOpenAI = model["client"]
+assert client.api_key == os.environ["AZURE_API_KEY"]
+assert client.base_url == os.environ["AZURE_API_BASE"]
+assert client.max_retries == (os.environ["AZURE_MAX_RETRIES"]), f"{client.max_retries} vs {os.environ['AZURE_MAX_RETRIES']}"
+assert client.timeout == (os.environ["AZURE_TIMEOUT"]), f"{client.timeout} vs {os.environ['AZURE_TIMEOUT']}"
+print("sync client set correctly!")
+
+print("\n Testing sync stream client")
+stream_client: openai.AzureOpenAI = model["stream_client"]
+assert stream_client.api_key == os.environ["AZURE_API_KEY"]
+assert stream_client.base_url == os.environ["AZURE_API_BASE"]
+assert stream_client.max_retries == (os.environ["AZURE_MAX_RETRIES"]), f"{stream_client.max_retries} vs {os.environ['AZURE_MAX_RETRIES']}"
+assert stream_client.timeout == (os.environ["AZURE_STREAM_TIMEOUT"]), f"{stream_client.timeout} vs {os.environ['AZURE_TIMEOUT']}"
+print("sync stream client set correctly!")
+
 router.reset()
 except Exception as e:
 traceback.print_exc()
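For context: the added assertions check that the Router propagated "os.environ/..." settings onto each Azure OpenAI client it caches per deployment (sync, async, and the dedicated streaming variants, including the sync stream client this commit covers). Below is a minimal sketch of the kind of Router setup such a test relies on; the model alias, deployment name, placeholder environment values, and the exact litellm_params keys are assumptions for illustration, not the actual test fixture.

import os

from litellm import Router

# Placeholder credentials/settings -- assumed values, not the real test's.
os.environ["AZURE_API_KEY"] = "my-azure-key"
os.environ["AZURE_API_BASE"] = "https://example-resource.openai.azure.com"
os.environ["AZURE_API_VERSION"] = "2023-07-01-preview"
os.environ["AZURE_TIMEOUT"] = "280"
os.environ["AZURE_STREAM_TIMEOUT"] = "60"
os.environ["AZURE_MAX_RETRIES"] = "2"

# Deployment params can reference environment variables with the
# "os.environ/<VAR_NAME>" convention instead of hard-coding values.
router = Router(
    model_list=[
        {
            "model_name": "gpt-3.5-turbo",        # assumed alias
            "litellm_params": {
                "model": "azure/chatgpt-v-2",     # assumed deployment name
                "api_key": "os.environ/AZURE_API_KEY",
                "api_base": "os.environ/AZURE_API_BASE",
                "api_version": "os.environ/AZURE_API_VERSION",
                "timeout": "os.environ/AZURE_TIMEOUT",
                "stream_timeout": "os.environ/AZURE_STREAM_TIMEOUT",
                "max_retries": "os.environ/AZURE_MAX_RETRIES",
            },
        }
    ]
)

# A test in this style then pulls the cached clients off the deployment
# entry (model["client"], model["stream_client"], model["async_client"],
# model["stream_async_client"]) and asserts that api_key, base_url,
# max_retries, and timeout match the environment variables above,
# before cleaning up with router.reset().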