Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-13 04:22:35 +00:00
group all server stores under storage.stores
This commit is contained in:
parent 636764c2a1
commit 2bba56a0a8
35 changed files with 806 additions and 503 deletions
@@ -80,9 +80,9 @@ async def get_auto_router_impl(
     # TODO: move pass configs to routers instead
     if api == Api.inference:
-        inference_ref = run_config.inference_store
+        inference_ref = run_config.storage.stores.inference
         if not inference_ref:
-            raise ValueError("inference_store must be configured in run config")
+            raise ValueError("storage.stores.inference must be configured in run config")

         inference_store = InferenceStore(
             reference=inference_ref,
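For context, here is a minimal sketch of what the regrouped run config implies for the router code above. Only the `storage.stores.inference` path and the error message come from the diff; the model names (`StoresConfig`, `StorageConfig`, `RunConfig`) and the pydantic-based structure are assumptions used purely for illustration, not the repository's actual classes.

# Minimal sketch, assuming pydantic-style config models.
# Only run_config.storage.stores.inference is taken from the diff above;
# StoresConfig, StorageConfig, and RunConfig are hypothetical names.
from pydantic import BaseModel


class StoresConfig(BaseModel):
    # Per-API store references that used to live as top-level fields
    # such as inference_store; other server stores would sit alongside this.
    inference: str | None = None


class StorageConfig(BaseModel):
    stores: StoresConfig = StoresConfig()


class RunConfig(BaseModel):
    storage: StorageConfig = StorageConfig()


run_config = RunConfig(
    storage=StorageConfig(stores=StoresConfig(inference="sqlite:///inference.db"))
)

# Mirrors the access pattern introduced by this commit.
inference_ref = run_config.storage.stores.inference
if not inference_ref:
    raise ValueError("storage.stores.inference must be configured in run config")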