mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-30 20:00:00 +00:00
refactor: Add ProviderContext for a flexible storage directory
- Introduce ProviderContext class to decouple provider storage paths from absolute paths
- Add storage_dir attribute to StackRunConfig to accept CLI options
- Implement storage directory resolution with prioritized fallbacks:
  1. CLI option (--state-directory)
  2. Environment variable (LLAMA_STACK_STATE_DIR)
  3. Default distribution directory
- Standardize provider signatures to follow the context, config, deps pattern
- Update provider implementations to use the new context-based approach
- Add comprehensive tests to verify state directory resolution
This commit is contained in:
parent
dd07c7a5b5
commit
e6c9aebe47
41 changed files with 242 additions and 81 deletions
|
|
@ -6,14 +6,16 @@
|
|||
|
||||
from typing import Any
|
||||
|
||||
from llama_stack.providers.datatypes import ProviderContext
|
||||
|
||||
from .config import LlamaGuardConfig
|
||||
|
||||
|
||||
async def get_provider_impl(context: ProviderContext, config: LlamaGuardConfig, deps: dict[str, Any]):
    """Build and initialize the Llama Guard safety provider implementation.

    This is the post-refactor entry point: a ``ProviderContext`` now precedes
    the config so providers can resolve their storage directory from it
    (per the commit: context, config, deps signature standardization).

    :param context: provider-level context (carries e.g. the resolved
        storage directory — per the commit message; exact contents are
        defined in ``llama_stack.providers.datatypes``).
    :param config: provider configuration; must be a ``LlamaGuardConfig``.
    :param deps: mapping of API dependencies handed to the implementation.
    :returns: an initialized ``LlamaGuardSafetyImpl`` instance.
    :raises AssertionError: if ``config`` is not a ``LlamaGuardConfig``.
    """
    # Deferred import — presumably to avoid importing the heavy implementation
    # module (and its dependencies) at registry-load time; TODO confirm.
    from .llama_guard import LlamaGuardSafetyImpl

    assert isinstance(config, LlamaGuardConfig), f"Unexpected config type: {type(config)}"

    impl = LlamaGuardSafetyImpl(context, config, deps)
    await impl.initialize()
    return impl
|
|
|
|||
|
|
@ -24,7 +24,7 @@ from llama_stack.apis.shields import Shield
|
|||
from llama_stack.distribution.datatypes import Api
|
||||
from llama_stack.models.llama.datatypes import Role
|
||||
from llama_stack.models.llama.sku_types import CoreModelId
|
||||
from llama_stack.providers.datatypes import ShieldsProtocolPrivate
|
||||
from llama_stack.providers.datatypes import ProviderContext, ShieldsProtocolPrivate
|
||||
from llama_stack.providers.utils.inference.prompt_adapter import (
|
||||
interleaved_content_as_str,
|
||||
)
|
||||
|
|
@ -130,7 +130,8 @@ PROMPT_TEMPLATE = Template(f"{PROMPT_TASK}{SAFETY_CATEGORIES}{PROMPT_CONVERSATIO
|
|||
|
||||
|
||||
class LlamaGuardSafetyImpl(Safety, ShieldsProtocolPrivate):
|
||||
def __init__(self, context: ProviderContext, config: LlamaGuardConfig, deps) -> None:
    """Store the provider context, config, and required API dependencies.

    Post-refactor constructor for ``LlamaGuardSafetyImpl``: accepts the new
    ``ProviderContext`` first, matching the standardized
    (context, config, deps) provider signature.

    :param context: provider context (new in the storage-dir refactor).
    :param config: ``LlamaGuardConfig`` for this provider instance.
    :param deps: dependency map; must contain ``Api.inference`` — raises
        ``KeyError`` otherwise.
    """
    # kept for storage-directory resolution by the provider
    self.context = context
    self.config = config
    # the inference API is the only dependency this hunk shows being read
    self.inference_api = deps[Api.inference]
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue