diff --git a/docs/my-website/docs/proxy/config_management.md b/docs/my-website/docs/proxy/config_management.md
deleted file mode 100644
index 4f7c5775b..000000000
--- a/docs/my-website/docs/proxy/config_management.md
+++ /dev/null
@@ -1,59 +0,0 @@
-# File Management
-
-## `include` external YAML files in a config.yaml
-
-You can use `include` to include external YAML files in a config.yaml.
-
-**Quick Start Usage:**
-
-To include a config file, use `include` with either a single file or a list of files.
-
-Contents of `parent_config.yaml`:
-```yaml
-include:
-  - model_config.yaml # 👈 Key change, will include the contents of model_config.yaml
-
-litellm_settings:
-  callbacks: ["prometheus"]
-```
-
-
-Contents of `model_config.yaml`:
-```yaml
-model_list:
-  - model_name: gpt-4o
-    litellm_params:
-      model: openai/gpt-4o
-      api_base: https://exampleopenaiendpoint-production.up.railway.app/
-  - model_name: fake-anthropic-endpoint
-    litellm_params:
-      model: anthropic/fake
-      api_base: https://exampleanthropicendpoint-production.up.railway.app/
-
-```
-
-Start proxy server
-
-This will start the proxy server with config `parent_config.yaml`. Since the `include` directive is used, the server will also include the contents of `model_config.yaml`.
-```
-litellm --config parent_config.yaml --detailed_debug
-```
-
-
-
-
-## Examples using `include`
-
-Include a single file:
-```yaml
-include:
-  - model_config.yaml
-```
-
-Include multiple files:
-```yaml
-include:
-  - model_config.yaml
-  - another_config.yaml
-```
\ No newline at end of file
diff --git a/docs/my-website/docs/proxy/configs.md b/docs/my-website/docs/proxy/configs.md
index ccb9872d6..4bd4d2666 100644
--- a/docs/my-website/docs/proxy/configs.md
+++ b/docs/my-website/docs/proxy/configs.md
@@ -2,7 +2,7 @@ import Image from '@theme/IdealImage';
 import Tabs from '@theme/Tabs';
 import TabItem from '@theme/TabItem';
 
-# Overview
+# Proxy Config.yaml
 Set model list, `api_base`, `api_key`, `temperature` & proxy server settings (`master-key`) on the config.yaml. 
 
 | Param Name | Description |
diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js
index 1deb0dd75..879175db6 100644
--- a/docs/my-website/sidebars.js
+++ b/docs/my-website/sidebars.js
@@ -32,7 +32,7 @@ const sidebars = {
       {
         "type": "category",
         "label": "Config.yaml",
-        "items": ["proxy/configs", "proxy/config_management", "proxy/config_settings"]
+        "items": ["proxy/configs", "proxy/config_settings"]
       },
       {
         type: "category",
diff --git a/litellm/proxy/model_config.yaml b/litellm/proxy/model_config.yaml
deleted file mode 100644
index a0399c095..000000000
--- a/litellm/proxy/model_config.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-model_list:
-  - model_name: gpt-4o
-    litellm_params:
-      model: openai/gpt-4o
-      api_base: https://exampleopenaiendpoint-production.up.railway.app/
-  - model_name: fake-anthropic-endpoint
-    litellm_params:
-      model: anthropic/fake
-      api_base: https://exampleanthropicendpoint-production.up.railway.app/
-
diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index 968cb8b39..2cf300da4 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -1,5 +1,24 @@
-include:
-  - model_config.yaml
+model_list:
+  - model_name: gpt-4o
+    litellm_params:
+      model: openai/gpt-4o
+      api_base: https://exampleopenaiendpoint-production.up.railway.app/
+  - model_name: fake-anthropic-endpoint
+    litellm_params:
+      model: anthropic/fake
+      api_base: https://exampleanthropicendpoint-production.up.railway.app/
+
+router_settings:
+  provider_budget_config:
+    openai:
+      budget_limit: 0.3 # float of $ value budget for time period
+      time_period: 1d # can be 1d, 2d, 30d
+    anthropic:
+      budget_limit: 5
+      time_period: 1d
+  redis_host: os.environ/REDIS_HOST
+  redis_port: os.environ/REDIS_PORT
+  redis_password: os.environ/REDIS_PASSWORD
 
 litellm_settings:
   callbacks: ["datadog"]
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 15971263a..afb83aa37 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -1377,16 +1377,6 @@ class ProxyConfig:
         _, file_extension = os.path.splitext(config_file_path)
         return file_extension.lower() == ".yaml" or file_extension.lower() == ".yml"
 
-    def _load_yaml_file(self, file_path: str) -> dict:
-        """
-        Load and parse a YAML file
-        """
-        try:
-            with open(file_path, "r") as file:
-                return yaml.safe_load(file) or {}
-        except Exception as e:
-            raise Exception(f"Error loading yaml file {file_path}: {str(e)}")
-
     async def _get_config_from_file(
         self, config_file_path: Optional[str] = None
     ) -> dict:
@@ -1417,51 +1407,6 @@
                 "litellm_settings": {},
             }
 
-        # Process includes
-        config = self._process_includes(
-            config=config, base_dir=os.path.dirname(os.path.abspath(file_path or ""))
-        )
-
-        verbose_proxy_logger.debug(f"loaded config={json.dumps(config, indent=4)}")
-        return config
-
-    def _process_includes(self, config: dict, base_dir: str) -> dict:
-        """
-        Process includes by appending their contents to the main config
-
-        Handles nested config.yamls with `include` section
-
-        Example config: This will get the contents from files in `include` and append it
-        ```yaml
-        include:
-            - model_config.yaml
-
-        litellm_settings:
-            callbacks: ["prometheus"]
-        ```
-        """
-        if "include" not in config:
-            return config
-
-        if not isinstance(config["include"], list):
-            raise ValueError("'include' must be a list of file paths")
-
-        # Load and append all included files
-        for include_file in config["include"]:
-            file_path = os.path.join(base_dir, include_file)
-            if not os.path.exists(file_path):
-                raise FileNotFoundError(f"Included file not found: {file_path}")
-
-            included_config = self._load_yaml_file(file_path)
-            # Simply update/extend the main config with included config
-            for key, value in included_config.items():
-                if isinstance(value, list) and key in config:
-                    config[key].extend(value)
-                else:
-                    config[key] = value
-
-        # Remove the include directive
-        del config["include"]
         return config
 
     async def save_config(self, new_config: dict):
diff --git a/tests/proxy_unit_tests/example_config_yaml/config_with_include.yaml b/tests/proxy_unit_tests/example_config_yaml/config_with_include.yaml
deleted file mode 100644
index 0a0c9434b..000000000
--- a/tests/proxy_unit_tests/example_config_yaml/config_with_include.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-include:
-  - included_models.yaml
-
-litellm_settings:
-  callbacks: ["prometheus"]
\ No newline at end of file
diff --git a/tests/proxy_unit_tests/example_config_yaml/config_with_missing_include.yaml b/tests/proxy_unit_tests/example_config_yaml/config_with_missing_include.yaml
deleted file mode 100644
index 40d3e9e7f..000000000
--- a/tests/proxy_unit_tests/example_config_yaml/config_with_missing_include.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-include:
-  - non-existent-file.yaml
-
-litellm_settings:
-  callbacks: ["prometheus"]
\ No newline at end of file
diff --git a/tests/proxy_unit_tests/example_config_yaml/config_with_multiple_includes.yaml b/tests/proxy_unit_tests/example_config_yaml/config_with_multiple_includes.yaml
deleted file mode 100644
index c46adacd7..000000000
--- a/tests/proxy_unit_tests/example_config_yaml/config_with_multiple_includes.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-include:
-  - models_file_1.yaml
-  - models_file_2.yaml
-
-litellm_settings:
-  callbacks: ["prometheus"]
\ No newline at end of file
diff --git a/tests/proxy_unit_tests/example_config_yaml/included_models.yaml b/tests/proxy_unit_tests/example_config_yaml/included_models.yaml
deleted file mode 100644
index c1526b203..000000000
--- a/tests/proxy_unit_tests/example_config_yaml/included_models.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-model_list:
-  - model_name: included-model
-    litellm_params:
-      model: gpt-4
\ No newline at end of file
diff --git a/tests/proxy_unit_tests/example_config_yaml/models_file_1.yaml b/tests/proxy_unit_tests/example_config_yaml/models_file_1.yaml
deleted file mode 100644
index 344f67128..000000000
--- a/tests/proxy_unit_tests/example_config_yaml/models_file_1.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-model_list:
-  - model_name: included-model-1
-    litellm_params:
-      model: gpt-4
\ No newline at end of file
diff --git a/tests/proxy_unit_tests/example_config_yaml/models_file_2.yaml b/tests/proxy_unit_tests/example_config_yaml/models_file_2.yaml
deleted file mode 100644
index 56bc3b1aa..000000000
--- a/tests/proxy_unit_tests/example_config_yaml/models_file_2.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-model_list:
-  - model_name: included-model-2
-    litellm_params:
-      model: gpt-3.5-turbo
\ No newline at end of file
diff --git a/tests/proxy_unit_tests/test_proxy_config_unit_test.py b/tests/proxy_unit_tests/test_proxy_config_unit_test.py
index e9923e89d..bb51ce726 100644
--- a/tests/proxy_unit_tests/test_proxy_config_unit_test.py
+++ b/tests/proxy_unit_tests/test_proxy_config_unit_test.py
@@ -23,8 +23,6 @@ import logging
 
 from litellm.proxy.proxy_server import ProxyConfig
 
-INVALID_FILES = ["config_with_missing_include.yaml"]
-
 
 @pytest.mark.asyncio
 async def test_basic_reading_configs_from_files():
@@ -40,9 +38,6 @@ async def test_basic_reading_configs_from_files():
     print(files)
     for file in files:
-        if file in INVALID_FILES:  # these are intentionally invalid files
-            continue
-
         print("reading file=", file)
         config_path = os.path.join(example_config_yaml_path, file)
         config = await proxy_config_instance.get_config(config_file_path=config_path)
         print(config)
@@ -120,67 +115,3 @@ async def test_read_config_file_with_os_environ_vars():
                 os.environ[key] = _old_env_vars[key]
             else:
                 del os.environ[key]
-
-
-@pytest.mark.asyncio
-async def test_basic_include_directive():
-    """
-    Test that the include directive correctly loads and merges configs
-    """
-    proxy_config_instance = ProxyConfig()
-    current_path = os.path.dirname(os.path.abspath(__file__))
-    config_path = os.path.join(
-        current_path, "example_config_yaml", "config_with_include.yaml"
-    )
-
-    config = await proxy_config_instance.get_config(config_file_path=config_path)
-
-    # Verify the included model list was merged
-    assert len(config["model_list"]) > 0
-    assert any(
-        model["model_name"] == "included-model" for model in config["model_list"]
-    )
-
-    # Verify original config settings remain
-    assert config["litellm_settings"]["callbacks"] == ["prometheus"]
-
-
-@pytest.mark.asyncio
-async def test_missing_include_file():
-    """
-    Test that a missing included file raises FileNotFoundError
-    """
-    proxy_config_instance = ProxyConfig()
-    current_path = os.path.dirname(os.path.abspath(__file__))
-    config_path = os.path.join(
-        current_path, "example_config_yaml", "config_with_missing_include.yaml"
-    )
-
-    with pytest.raises(FileNotFoundError):
-        await proxy_config_instance.get_config(config_file_path=config_path)
-
-
-@pytest.mark.asyncio
-async def test_multiple_includes():
-    """
-    Test that multiple files in the include list are all processed correctly
-    """
-    proxy_config_instance = ProxyConfig()
-    current_path = os.path.dirname(os.path.abspath(__file__))
-    config_path = os.path.join(
-        current_path, "example_config_yaml", "config_with_multiple_includes.yaml"
-    )
-
-    config = await proxy_config_instance.get_config(config_file_path=config_path)
-
-    # Verify models from both included files are present
-    assert len(config["model_list"]) == 2
-    assert any(
-        model["model_name"] == "included-model-1" for model in config["model_list"]
-    )
-    assert any(
-        model["model_name"] == "included-model-2" for model in config["model_list"]
-    )
-
-    # Verify original config settings remain
-    assert config["litellm_settings"]["callbacks"] == ["prometheus"]
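Note: with `_process_includes` removed by this patch, the proxy no longer expands an `include:` list at load time, so split config files would have to be merged before being passed to `litellm --config`. Below is a minimal, hypothetical pre-merge sketch; the `merge_configs` helper and the example file names are illustrative only and are not part of the LiteLLM codebase. It mirrors the merge semantics of the removed method (list values such as `model_list` are extended, other keys are overwritten).

```python
# merge_configs.py -- hypothetical standalone helper, not part of LiteLLM.
# Requires PyYAML. Merges extra YAML config files into a parent config the
# same way the removed _process_includes did: extend lists, overwrite the rest.
import yaml


def merge_configs(parent_path: str, include_paths: list[str], out_path: str) -> None:
    with open(parent_path) as f:
        merged = yaml.safe_load(f) or {}
    for path in include_paths:
        with open(path) as f:
            included = yaml.safe_load(f) or {}
        for key, value in included.items():
            if isinstance(value, list) and isinstance(merged.get(key), list):
                merged[key].extend(value)  # append e.g. extra model_list entries
            else:
                merged[key] = value  # later files win for non-list keys
    with open(out_path, "w") as f:
        yaml.safe_dump(merged, f, sort_keys=False)


if __name__ == "__main__":
    # Example file names are placeholders.
    merge_configs("parent_config.yaml", ["model_config.yaml"], "merged_config.yaml")
    # Then start the proxy with: litellm --config merged_config.yaml
```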