(test) test_reading proxy

ishaan-jaff 2023-12-04 13:24:35 -08:00
parent a99f471d29
commit 50284771b7
2 changed files with 13 additions and 7 deletions

View file

@@ -4,14 +4,12 @@ model_list:
      model: azure/chatgpt-v-2
      api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
      api_version: "2023-05-15"
      azure_ad_token: eyJ0eXAiOiJ
      api_key: os.environ/AZURE_API_KEY
      tpm: 20_000
  - model_name: gpt-4-team2
    litellm_params:
      model: azure/gpt-4
      api_key: sk-123
      api_key: os.environ/AZURE_API_KEY
      api_base: https://openai-gpt-4-test-v-2.openai.azure.com/
  - model_name: gpt-4-team3
    litellm_params:
      model: azure/gpt-4
      api_key: sk-123
      tpm: 100_000
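
The config change above replaces hard-coded credentials (a pasted azure_ad_token and api_key: sk-123) with os.environ/AZURE_API_KEY references, so the secret is read from the environment at load time rather than stored in the YAML. Below is a minimal sketch of how such a reference could be resolved; resolve_secret is a hypothetical helper for illustration only, not litellm's actual implementation.

import os

def resolve_secret(value: str) -> str:
    # Hypothetical helper: values written as "os.environ/<VAR_NAME>" are
    # looked up in the process environment instead of being kept in the YAML.
    prefix = "os.environ/"
    if value.startswith(prefix):
        return os.environ[value[len(prefix):]]  # raises KeyError if the variable is unset
    return value

# Example: with AZURE_API_KEY exported, resolve_secret("os.environ/AZURE_API_KEY")
# returns the key from the environment; a literal value like "sk-123" passes through unchanged.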

View file

@@ -172,9 +172,17 @@ from litellm.proxy.proxy_server import load_router_config
def test_load_router_config():
    try:
        print("testing reading config")
        # this is a basic config.yaml with only a model
        result = load_router_config(router=None, config_file_path="../proxy/example_config_yaml/simple_config.yaml")
        print(result)
        assert len(result[1]) == 1
        # this is a load balancing config yaml
        result = load_router_config(router=None, config_file_path="../proxy/example_config_yaml/azure_config.yaml")
        print(result)
        assert len(result[1]) == 2
    except Exception as e:
        pytest.fail(f"Proxy: Got exception reading config: {e}")
# test_load_router_config()
test_load_router_config()
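
The test loads two example configs and checks the length of result[1], which suggests load_router_config returns a tuple whose second element is the list parsed from model_list: one entry for simple_config.yaml, two for azure_config.yaml. A minimal sketch of that parsing step, under that assumption, is below; load_model_list is a hypothetical stand-in, not the function from litellm.proxy.proxy_server.

import yaml

def load_model_list(config_file_path: str) -> list:
    # Hypothetical stand-in for the slice of load_router_config exercised by the test:
    # parse the YAML file and return the entries under model_list.
    with open(config_file_path) as f:
        config = yaml.safe_load(f) or {}
    return config.get("model_list", [])

# simple_config.yaml is expected to define one deployment and azure_config.yaml two,
# which is what `assert len(result[1]) == 1` and `assert len(result[1]) == 2` verify.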