Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
(test) test_reading proxy
This commit is contained in:
parent a99f471d29
commit 50284771b7
2 changed files with 13 additions and 7 deletions
@@ -172,9 +172,17 @@ from litellm.proxy.proxy_server import load_router_config

def test_load_router_config():
    try:
        print("testing reading config")

        # this is a basic config.yaml with only a model
        result = load_router_config(router=None, config_file_path="../proxy/example_config_yaml/simple_config.yaml")
        print(result)
        assert len(result[1]) == 1

        # this is a load balancing config yaml
        result = load_router_config(router=None, config_file_path="../proxy/example_config_yaml/azure_config.yaml")
        print(result)
        assert len(result[1]) == 2
    except Exception as e:
        # include the exception message in the failure reason
        pytest.fail(f"Proxy: Got exception reading config: {e}")

# test_load_router_config()
test_load_router_config()
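For context on the assertions above: here is a minimal sketch of what the two example configs could contain, assuming the litellm proxy config format with a top-level model_list key, and assuming load_router_config returns a tuple whose second element is that parsed model list. The inline YAML, model names, and endpoints below are illustrative placeholders, not the actual files under proxy/example_config_yaml.

# Sketch only: approximates the assumed structure of simple_config.yaml and
# azure_config.yaml; requires PyYAML (pip install pyyaml).
import yaml

SIMPLE_CONFIG = """
model_list:
  - model_name: gpt-3.5-turbo        # single model -> len(model_list) == 1
    litellm_params:
      model: gpt-3.5-turbo
"""

AZURE_LOAD_BALANCING_CONFIG = """
model_list:
  - model_name: azure-gpt-3.5        # two deployments behind one alias
    litellm_params:
      model: azure/my-deployment-1   # hypothetical deployment name
      api_base: https://example-endpoint-1.openai.azure.com/
  - model_name: azure-gpt-3.5
    litellm_params:
      model: azure/my-deployment-2   # hypothetical deployment name
      api_base: https://example-endpoint-2.openai.azure.com/
"""

# Mirrors the test's assertions: result[1] is assumed to be the parsed model
# list, so the simple config yields 1 entry and the azure config yields 2.
assert len(yaml.safe_load(SIMPLE_CONFIG)["model_list"]) == 1
assert len(yaml.safe_load(AZURE_LOAD_BALANCING_CONFIG)["model_list"]) == 2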