From f05d5138e867987f9ab22b4e4ee85edf2987d8ec Mon Sep 17 00:00:00 2001 From: Charlie Doern Date: Wed, 26 Nov 2025 15:52:21 -0500 Subject: [PATCH] fix: rename some more usages change some more run.yaml into config.yaml references, alter some parameter names, etc Signed-off-by: Charlie Doern --- .github/workflows/test-external.yml | 6 +++--- docs/docs/concepts/apis/external.mdx | 4 ++-- src/llama_stack/cli/stack/_list_deps.py | 10 +++++----- src/llama_stack/core/conversations/conversations.py | 4 ++-- src/llama_stack/core/inspect.py | 8 ++++---- src/llama_stack/core/prompts/prompts.py | 4 ++-- src/llama_stack/core/providers.py | 4 ++-- src/llama_stack/core/stack.py | 10 +++++----- tests/external/{run-byoa.yaml => config.yaml} | 0 tests/unit/prompts/prompts/conftest.py | 2 +- 10 files changed, 26 insertions(+), 26 deletions(-) rename tests/external/{run-byoa.yaml => config.yaml} (100%) diff --git a/.github/workflows/test-external.yml b/.github/workflows/test-external.yml index fed3967ee..02544a9cd 100644 --- a/.github/workflows/test-external.yml +++ b/.github/workflows/test-external.yml @@ -44,14 +44,14 @@ jobs: - name: Print distro dependencies run: | - uv run --no-sync llama stack list-deps tests/external/run-byoa.yaml + uv run --no-sync llama stack list-deps tests/external/config.yaml - name: Build distro from config file run: | uv venv ci-test source ci-test/bin/activate uv pip install -e . 
- LLAMA_STACK_LOGGING=all=CRITICAL llama stack list-deps tests/external/run-byoa.yaml | xargs -L1 uv pip install + LLAMA_STACK_LOGGING=all=CRITICAL llama stack list-deps tests/external/config.yaml | xargs -L1 uv pip install - name: Start Llama Stack server in background if: ${{ matrix.image-type }} == 'venv' @@ -62,7 +62,7 @@ jobs: # Use the virtual environment created by the build step (name comes from build config) source ci-test/bin/activate uv pip list - nohup llama stack run tests/external/run-byoa.yaml > server.log 2>&1 & + nohup llama stack run tests/external/config.yaml > server.log 2>&1 & - name: Wait for Llama Stack server to be ready run: | diff --git a/docs/docs/concepts/apis/external.mdx b/docs/docs/concepts/apis/external.mdx index 005b85647..4c2c92410 100644 --- a/docs/docs/concepts/apis/external.mdx +++ b/docs/docs/concepts/apis/external.mdx @@ -337,7 +337,7 @@ uv pip install -e . 7. Configure Llama Stack to use the provider: ```yaml -# ~/.llama/run-byoa.yaml +# ~/.llama/config.yaml version: "2" image_name: "llama-stack-api-weather" apis: @@ -356,7 +356,7 @@ server: 8. Run the server: ```bash -llama stack run ~/.llama/run-byoa.yaml +llama stack run ~/.llama/config.yaml ``` 9. 
Test the API: diff --git a/src/llama_stack/cli/stack/_list_deps.py b/src/llama_stack/cli/stack/_list_deps.py index 80b67ce62..dbfb1bb51 100644 --- a/src/llama_stack/cli/stack/_list_deps.py +++ b/src/llama_stack/cli/stack/_list_deps.py @@ -78,7 +78,7 @@ def run_stack_list_deps_command(args: argparse.Namespace) -> None: with open(config_file) as f: try: contents = yaml.safe_load(f) - run_config = StackConfig(**contents) + config = StackConfig(**contents) except Exception as e: cprint( f"Could not parse config file {config_file}: {e}", @@ -119,16 +119,16 @@ def run_stack_list_deps_command(args: argparse.Namespace) -> None: file=sys.stderr, ) sys.exit(1) - run_config = StackConfig(providers=provider_list, image_name="providers-run") + config = StackConfig(providers=provider_list, image_name="providers-run") - normal_deps, special_deps, external_provider_dependencies = get_provider_dependencies(run_config) + normal_deps, special_deps, external_provider_dependencies = get_provider_dependencies(config) normal_deps += SERVER_DEPENDENCIES # Add external API dependencies - if run_config.external_apis_dir: + if config.external_apis_dir: from llama_stack.core.external import load_external_apis - external_apis = load_external_apis(run_config) + external_apis = load_external_apis(config) if external_apis: for _, api_spec in external_apis.items(): normal_deps.extend(api_spec.pip_packages) diff --git a/src/llama_stack/core/conversations/conversations.py b/src/llama_stack/core/conversations/conversations.py index 3e867721e..812156e09 100644 --- a/src/llama_stack/core/conversations/conversations.py +++ b/src/llama_stack/core/conversations/conversations.py @@ -36,7 +36,7 @@ class ConversationServiceConfig(BaseModel): :param policy: Access control rules """ - run_config: StackConfig + config: StackConfig policy: list[AccessRule] = [] @@ -56,7 +56,7 @@ class ConversationServiceImpl(Conversations): self.policy = config.policy # Use conversations store reference from run config - 
conversations_ref = config.run_config.storage.stores.conversations + conversations_ref = config.config.storage.stores.conversations if not conversations_ref: raise ValueError("storage.stores.conversations must be configured in run config") diff --git a/src/llama_stack/core/inspect.py b/src/llama_stack/core/inspect.py index 3b60027f0..f14326f2d 100644 --- a/src/llama_stack/core/inspect.py +++ b/src/llama_stack/core/inspect.py @@ -22,7 +22,7 @@ from llama_stack_api import ( class DistributionInspectConfig(BaseModel): - run_config: StackConfig + config: StackConfig async def get_provider_impl(config, deps): @@ -40,7 +40,7 @@ class DistributionInspectImpl(Inspect): pass async def list_routes(self, api_filter: str | None = None) -> ListRoutesResponse: - run_config: StackConfig = self.config.run_config + config: StackConfig = self.config.config # Helper function to determine if a route should be included based on api_filter def should_include_route(webmethod) -> bool: @@ -55,7 +55,7 @@ class DistributionInspectImpl(Inspect): return not webmethod.deprecated and webmethod.level == api_filter ret = [] - external_apis = load_external_apis(run_config) + external_apis = load_external_apis(config) all_endpoints = get_all_api_routes(external_apis) for api, endpoints in all_endpoints.items(): # Always include provider and inspect APIs, filter others based on run config @@ -72,7 +72,7 @@ class DistributionInspectImpl(Inspect): ] ) else: - providers = run_config.providers.get(api.value, []) + providers = config.providers.get(api.value, []) if providers: # Only process if there are providers for this API ret.extend( [ diff --git a/src/llama_stack/core/prompts/prompts.py b/src/llama_stack/core/prompts/prompts.py index 40539f342..44e560091 100644 --- a/src/llama_stack/core/prompts/prompts.py +++ b/src/llama_stack/core/prompts/prompts.py @@ -20,7 +20,7 @@ class PromptServiceConfig(BaseModel): :param run_config: Stack run configuration containing distribution info """ - run_config: 
StackConfig + config: StackConfig async def get_provider_impl(config: PromptServiceConfig, deps: dict[Any, Any]): @@ -40,7 +40,7 @@ class PromptServiceImpl(Prompts): async def initialize(self) -> None: # Use prompts store reference from run config - prompts_ref = self.config.run_config.storage.stores.prompts + prompts_ref = self.config.config.storage.stores.prompts if not prompts_ref: raise ValueError("storage.stores.prompts must be configured in run config") self.kvstore = await kvstore_impl(prompts_ref) diff --git a/src/llama_stack/core/providers.py b/src/llama_stack/core/providers.py index 1f0ecae6f..c758c65bc 100644 --- a/src/llama_stack/core/providers.py +++ b/src/llama_stack/core/providers.py @@ -19,7 +19,7 @@ logger = get_logger(name=__name__, category="core") class ProviderImplConfig(BaseModel): - run_config: StackConfig + config: StackConfig async def get_provider_impl(config, deps): @@ -41,7 +41,7 @@ class ProviderImpl(Providers): pass async def list_providers(self) -> ListProvidersResponse: - run_config = self.config.run_config + run_config = self.config.config safe_config = StackConfig(**redact_sensitive_fields(run_config.model_dump())) providers_health = await self.get_providers_health() ret = [] diff --git a/src/llama_stack/core/stack.py b/src/llama_stack/core/stack.py index 96f9eb8b9..9310bce41 100644 --- a/src/llama_stack/core/stack.py +++ b/src/llama_stack/core/stack.py @@ -341,7 +341,7 @@ def cast_image_name_to_string(config_dict: dict[str, Any]) -> dict[str, Any]: return config_dict -def add_internal_implementations(impls: dict[Api, Any], run_config: StackConfig) -> None: +def add_internal_implementations(impls: dict[Api, Any], config: StackConfig) -> None: """Add internal implementations (inspect and providers) to the implementations dictionary. 
Args: @@ -349,25 +349,25 @@ def add_internal_implementations(impls: dict[Api, Any], run_config: StackConfig) run_config: Stack run configuration """ inspect_impl = DistributionInspectImpl( - DistributionInspectConfig(run_config=run_config), + DistributionInspectConfig(config=config), deps=impls, ) impls[Api.inspect] = inspect_impl providers_impl = ProviderImpl( - ProviderImplConfig(run_config=run_config), + ProviderImplConfig(config=config), deps=impls, ) impls[Api.providers] = providers_impl prompts_impl = PromptServiceImpl( - PromptServiceConfig(run_config=run_config), + PromptServiceConfig(config=config), deps=impls, ) impls[Api.prompts] = prompts_impl conversations_impl = ConversationServiceImpl( - ConversationServiceConfig(run_config=run_config), + ConversationServiceConfig(config=config), deps=impls, ) impls[Api.conversations] = conversations_impl diff --git a/tests/external/run-byoa.yaml b/tests/external/config.yaml similarity index 100% rename from tests/external/run-byoa.yaml rename to tests/external/config.yaml diff --git a/tests/unit/prompts/prompts/conftest.py b/tests/unit/prompts/prompts/conftest.py index 8ed5b429a..bd3fcc785 100644 --- a/tests/unit/prompts/prompts/conftest.py +++ b/tests/unit/prompts/prompts/conftest.py @@ -47,7 +47,7 @@ async def temp_prompt_store(tmp_path_factory): providers={}, storage=storage, ) - config = PromptServiceConfig(run_config=mock_run_config) + config = PromptServiceConfig(config=mock_run_config) store = PromptServiceImpl(config, deps={}) register_kvstore_backends({"kv_test": storage.backends["kv_test"]})