Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-13 04:22:35 +00:00)
simplified some, walked back some decisions
parent af7472cdb0
commit 636764c2a1
90 changed files with 887 additions and 570 deletions
@@ -80,9 +80,9 @@ async def get_auto_router_impl(
     # TODO: move pass configs to routers instead
     if api == Api.inference:
-        inference_ref = run_config.storage.inference
+        inference_ref = run_config.inference_store
         if not inference_ref:
-            raise ValueError("storage.inference must be configured in run config")
+            raise ValueError("inference_store must be configured in run config")

         inference_store = InferenceStore(
             reference=inference_ref,
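For orientation, a minimal runnable sketch of the guard this hunk introduces. The names below (InferenceStoreReference, RunConfig, resolve_inference_store_ref) are hypothetical, simplified stand-ins; the real llama-stack run config and InferenceStore carry more fields and parameters than shown here.

from dataclasses import dataclass
from typing import Optional

# Hypothetical, simplified stand-ins for the llama-stack objects touched by this hunk.
@dataclass
class InferenceStoreReference:
    db_path: str

@dataclass
class RunConfig:
    inference_store: Optional[InferenceStoreReference] = None

def resolve_inference_store_ref(run_config: RunConfig) -> InferenceStoreReference:
    # Mirrors the guard in the hunk: fail fast when the store is not configured
    # instead of deferring the error to first use.
    if not run_config.inference_store:
        raise ValueError("inference_store must be configured in run config")
    return run_config.inference_store

# Usage: a config with the store set passes the guard; an empty config raises.
ref = resolve_inference_store_ref(RunConfig(inference_store=InferenceStoreReference("distro.db")))
print(ref.db_path)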