Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-18 23:28:53 +00:00)
More work towards making remote stacks usable from tests
Parent: 8645f8bc9e
Commit: 8b7be87bec
7 changed files with 91 additions and 99 deletions
@@ -186,12 +186,7 @@ async def inference_stack(request, inference_model):
         [Api.inference],
         {"inference": inference_fixture.providers},
         inference_fixture.provider_data,
-        models=[
-            ModelInput(
-                model_id=inference_model,
-                provider_id=inference_fixture.providers[0].provider_id,
-            )
-        ],
+        models=[ModelInput(model_id=inference_model)],
     )
 
     return (impls[Api.inference], impls[Api.models])
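The removed lines spelled out provider_id explicitly; the replacement passes only model_id, presumably because the stack can fill in the provider when exactly one inference provider is configured in the test fixture. Below is a minimal, self-contained sketch of that inference rule; Provider and ModelInput here are stand-ins for illustration, not the real llama-stack types.

# Stand-alone sketch; Provider/ModelInput are stand-ins, not llama-stack's own classes.
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Provider:
    provider_id: str


@dataclass
class ModelInput:
    model_id: str
    provider_id: Optional[str] = None


def resolve_provider_id(model: ModelInput, providers: List[Provider]) -> str:
    # An explicit provider_id wins; otherwise fall back to the sole configured provider.
    if model.provider_id is not None:
        return model.provider_id
    if len(providers) == 1:
        return providers[0].provider_id
    raise ValueError("provider_id is ambiguous with multiple providers configured")


providers = [Provider(provider_id="ollama")]

# Before: the test fixture named the provider explicitly.
explicit = ModelInput(model_id="Llama3.1-8B-Instruct",
                      provider_id=providers[0].provider_id)

# After: model_id alone is enough; the provider is inferred.
implicit = ModelInput(model_id="Llama3.1-8B-Instruct")

assert resolve_provider_id(explicit, providers) == resolve_provider_id(implicit, providers) == "ollama"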