Compare commits

3 commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Ishaan Jaff | 241fcf77b5 | unit tests for include on config.yaml | 2024-11-26 12:53:44 -08:00 |
| Ishaan Jaff | 4a465aff1c | add doc on config management | 2024-11-26 12:42:14 -08:00 |
| Ishaan Jaff | 50dccce508 | add helper to process includes directive on yaml | 2024-11-26 12:13:28 -08:00 |
13 changed files with 226 additions and 25 deletions

@@ -0,0 +1,59 @@
# File Management

## `include` external YAML files in a config.yaml

You can use `include` to include external YAML files in a config.yaml.

**Quick Start Usage:**

To include a config file, use `include` with either a single file or a list of files.

Contents of `parent_config.yaml`:
```yaml
include:
  - model_config.yaml # 👈 Key change, will include the contents of model_config.yaml

litellm_settings:
  callbacks: ["prometheus"]
```

Contents of `model_config.yaml`:
```yaml
model_list:
  - model_name: gpt-4o
    litellm_params:
      model: openai/gpt-4o
      api_base: https://exampleopenaiendpoint-production.up.railway.app/
  - model_name: fake-anthropic-endpoint
    litellm_params:
      model: anthropic/fake
      api_base: https://exampleanthropicendpoint-production.up.railway.app/
```

**Start proxy server**

This will start the proxy server with config `parent_config.yaml`. Since the `include` directive is used, the server will also include the contents of `model_config.yaml`.
```shell
litellm --config parent_config.yaml --detailed_debug
```

## Examples using `include`

Include a single file:
```yaml
include:
  - model_config.yaml
```

Include multiple files:
```yaml
include:
  - model_config.yaml
  - another_config.yaml
```
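
How the merge behaves: per the `_process_includes` helper added in this PR, list-valued keys (such as `model_list`) from an included file are appended to the parent config, while any other key from an included file overwrites the parent's value. For the Quick Start example above, the effective config the proxy loads is therefore equivalent to:
```yaml
# Illustrative: the merged config after `include` is resolved
model_list:
  - model_name: gpt-4o
    litellm_params:
      model: openai/gpt-4o
      api_base: https://exampleopenaiendpoint-production.up.railway.app/
  - model_name: fake-anthropic-endpoint
    litellm_params:
      model: anthropic/fake
      api_base: https://exampleanthropicendpoint-production.up.railway.app/
litellm_settings:
  callbacks: ["prometheus"]
```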

@@ -2,7 +2,7 @@ import Image from '@theme/IdealImage';
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

-# Proxy Config.yaml
+# Overview

Set model list, `api_base`, `api_key`, `temperature` & proxy server settings (`master-key`) on the config.yaml.

| Param Name | Description |

@@ -32,7 +32,7 @@ const sidebars = {
    {
      "type": "category",
      "label": "Config.yaml",
-      "items": ["proxy/configs", "proxy/config_settings"]
+      "items": ["proxy/configs", "proxy/config_management", "proxy/config_settings"]
    },
    {
      type: "category",

@@ -0,0 +1,10 @@
model_list:
  - model_name: gpt-4o
    litellm_params:
      model: openai/gpt-4o
      api_base: https://exampleopenaiendpoint-production.up.railway.app/
  - model_name: fake-anthropic-endpoint
    litellm_params:
      model: anthropic/fake
      api_base: https://exampleanthropicendpoint-production.up.railway.app/

@@ -1,25 +1,5 @@
-model_list:
-  - model_name: gpt-4o
-    litellm_params:
-      model: openai/gpt-4o
-      api_base: https://exampleopenaiendpoint-production.up.railway.app/
-  - model_name: fake-anthropic-endpoint
-    litellm_params:
-      model: anthropic/fake
-      api_base: https://exampleanthropicendpoint-production.up.railway.app/
-
-router_settings:
-  provider_budget_config:
-    openai:
-      budget_limit: 0.3 # float of $ value budget for time period
-      time_period: 1d # can be 1d, 2d, 30d
-    anthropic:
-      budget_limit: 5
-      time_period: 1d
-  redis_host: os.environ/REDIS_HOST
-  redis_port: os.environ/REDIS_PORT
-  redis_password: os.environ/REDIS_PASSWORD
+include:
+  - model_config.yaml
+
 litellm_settings:
-  success_callback: ["langfuse"]
-  callbacks: ["prometheus"]
+  callbacks: ["prometheus"]

@@ -1377,6 +1377,16 @@ class ProxyConfig:
        _, file_extension = os.path.splitext(config_file_path)
        return file_extension.lower() == ".yaml" or file_extension.lower() == ".yml"

    def _load_yaml_file(self, file_path: str) -> dict:
        """
        Load and parse a YAML file
        """
        try:
            with open(file_path, "r") as file:
                return yaml.safe_load(file) or {}
        except Exception as e:
            raise Exception(f"Error loading yaml file {file_path}: {str(e)}")

    async def _get_config_from_file(
        self, config_file_path: Optional[str] = None
    ) -> dict:

@@ -1407,6 +1417,51 @@
                "litellm_settings": {},
            }

        # Process includes
        config = self._process_includes(
            config=config, base_dir=os.path.dirname(os.path.abspath(file_path or ""))
        )

        verbose_proxy_logger.debug(f"loaded config={json.dumps(config, indent=4)}")
        return config

    def _process_includes(self, config: dict, base_dir: str) -> dict:
        """
        Process includes by appending their contents to the main config

        Handles nested config.yamls with `include` section

        Example config: This will get the contents from files in `include` and append it
        ```yaml
        include:
          - model_config.yaml

        litellm_settings:
          callbacks: ["prometheus"]
        ```
        """
        if "include" not in config:
            return config

        if not isinstance(config["include"], list):
            raise ValueError("'include' must be a list of file paths")

        # Load and append all included files
        for include_file in config["include"]:
            file_path = os.path.join(base_dir, include_file)
            if not os.path.exists(file_path):
                raise FileNotFoundError(f"Included file not found: {file_path}")

            included_config = self._load_yaml_file(file_path)
            # Simply update/extend the main config with included config
            for key, value in included_config.items():
                if isinstance(value, list) and key in config:
                    config[key].extend(value)
                else:
                    config[key] = value

        # Remove the include directive
        del config["include"]
        return config

    async def save_config(self, new_config: dict):
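
The merge rule above is worth making concrete. Below is a minimal, self-contained sketch of the same behavior outside the `ProxyConfig` class (the function name `merge_included` is hypothetical, for illustration only): list values extend an existing list, every other key overwrites the parent's value, and the `include` key itself is dropped once processed.

```python
import yaml  # pyyaml, already a litellm dependency

def merge_included(config: dict, included: dict) -> dict:
    # Hypothetical standalone version of the _process_includes merge rule:
    # extend lists, overwrite everything else
    for key, value in included.items():
        if isinstance(value, list) and key in config:
            config[key].extend(value)
        else:
            config[key] = value
    return config

parent = yaml.safe_load("include: [model_config.yaml]\nmodel_list: [{model_name: gpt-4o}]")
included = yaml.safe_load("model_list: [{model_name: included-model}]")

merged = merge_included(parent, included)
del merged["include"]  # the directive is removed after processing
print([m["model_name"] for m in merged["model_list"]])
# -> ['gpt-4o', 'included-model']
```

One consequence of the overwrite rule: dictionaries are not deep-merged, so if both the parent and an included file define a mapping key like `litellm_settings`, the included file's value replaces the parent's entirely.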

@@ -0,0 +1,5 @@
include:
  - included_models.yaml

litellm_settings:
  callbacks: ["prometheus"]

@@ -0,0 +1,5 @@
include:
  - non-existent-file.yaml

litellm_settings:
  callbacks: ["prometheus"]

@@ -0,0 +1,6 @@
include:
  - models_file_1.yaml
  - models_file_2.yaml

litellm_settings:
  callbacks: ["prometheus"]

@@ -0,0 +1,4 @@
model_list:
  - model_name: included-model
    litellm_params:
      model: gpt-4

@@ -0,0 +1,4 @@
model_list:
  - model_name: included-model-1
    litellm_params:
      model: gpt-4

@@ -0,0 +1,4 @@
model_list:
  - model_name: included-model-2
    litellm_params:
      model: gpt-3.5-turbo

@@ -23,6 +23,8 @@ import logging
from litellm.proxy.proxy_server import ProxyConfig

INVALID_FILES = ["config_with_missing_include.yaml"]


@pytest.mark.asyncio
async def test_basic_reading_configs_from_files():

@@ -38,6 +40,9 @@ async def test_basic_reading_configs_from_files():
    print(files)

    for file in files:
        if file in INVALID_FILES:  # these are intentionally invalid files
            continue
        print("reading file=", file)
        config_path = os.path.join(example_config_yaml_path, file)
        config = await proxy_config_instance.get_config(config_file_path=config_path)
        print(config)

@@ -115,3 +120,67 @@ async def test_read_config_file_with_os_environ_vars():
            os.environ[key] = _old_env_vars[key]
        else:
            del os.environ[key]


@pytest.mark.asyncio
async def test_basic_include_directive():
    """
    Test that the include directive correctly loads and merges configs
    """
    proxy_config_instance = ProxyConfig()
    current_path = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(
        current_path, "example_config_yaml", "config_with_include.yaml"
    )

    config = await proxy_config_instance.get_config(config_file_path=config_path)

    # Verify the included model list was merged
    assert len(config["model_list"]) > 0
    assert any(
        model["model_name"] == "included-model" for model in config["model_list"]
    )

    # Verify original config settings remain
    assert config["litellm_settings"]["callbacks"] == ["prometheus"]


@pytest.mark.asyncio
async def test_missing_include_file():
    """
    Test that a missing included file raises FileNotFoundError
    """
    proxy_config_instance = ProxyConfig()
    current_path = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(
        current_path, "example_config_yaml", "config_with_missing_include.yaml"
    )

    with pytest.raises(FileNotFoundError):
        await proxy_config_instance.get_config(config_file_path=config_path)


@pytest.mark.asyncio
async def test_multiple_includes():
    """
    Test that multiple files in the include list are all processed correctly
    """
    proxy_config_instance = ProxyConfig()
    current_path = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(
        current_path, "example_config_yaml", "config_with_multiple_includes.yaml"
    )

    config = await proxy_config_instance.get_config(config_file_path=config_path)

    # Verify models from both included files are present
    assert len(config["model_list"]) == 2
    assert any(
        model["model_name"] == "included-model-1" for model in config["model_list"]
    )
    assert any(
        model["model_name"] == "included-model-2" for model in config["model_list"]
    )

    # Verify original config settings remain
    assert config["litellm_settings"]["callbacks"] == ["prometheus"]