Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-29 19:34:19 +00:00)
Fixes to the llama stack configure script + inference adapters
parent 4869f2b983
commit 1380d78c19

11 changed files with 124 additions and 37 deletions
@@ -42,8 +42,8 @@ def available_inference_providers() -> List[ProviderSpec]:
                 pip_packages=[
                     "fireworks-ai",
                 ],
-                module="llama_toolchain.inference.fireworks",
-                config_class="llama_toolchain.inference.fireworks.FireworksImplConfig",
+                module="llama_toolchain.inference.adapters.fireworks",
+                config_class="llama_toolchain.inference.adapters.fireworks.FireworksImplConfig",
             ),
         ),
         remote_provider_spec(
@@ -53,8 +53,8 @@ def available_inference_providers() -> List[ProviderSpec]:
                 pip_packages=[
                     "together",
                 ],
-                module="llama_toolchain.inference.together",
-                config_class="llama_toolchain.inference.together.TogetherImplConfig",
+                module="llama_toolchain.inference.adapters.together",
+                config_class="llama_toolchain.inference.adapters.together.TogetherImplConfig",
             ),
         ),
     ]
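
For context, below is a minimal, self-contained Python sketch of the provider-registry pattern this diff touches. The ProviderSpec dataclass and remote_provider_spec helper here are simplified stand-ins for the real llama_toolchain types, whose actual signatures are not shown in this diff; only the pip_packages, module, and config_class values are taken from the change itself.

# Hedged sketch of the provider registry pattern seen in the diff.
# ProviderSpec and remote_provider_spec below are simplified stand-ins,
# not the real llama_toolchain definitions.
from dataclasses import dataclass
from typing import List


@dataclass
class ProviderSpec:
    # Pip packages needed before the provider's module can be imported.
    pip_packages: List[str]
    # Dotted path of the provider/adapter module.
    module: str
    # Dotted path of the provider's config class.
    config_class: str


def remote_provider_spec(
    pip_packages: List[str], module: str, config_class: str
) -> ProviderSpec:
    # Stand-in helper: simply bundles the fields into a spec.
    return ProviderSpec(
        pip_packages=pip_packages, module=module, config_class=config_class
    )


def available_inference_providers() -> List[ProviderSpec]:
    # After this commit, both remote providers point at the adapters subpackage.
    return [
        remote_provider_spec(
            pip_packages=["fireworks-ai"],
            module="llama_toolchain.inference.adapters.fireworks",
            config_class="llama_toolchain.inference.adapters.fireworks.FireworksImplConfig",
        ),
        remote_provider_spec(
            pip_packages=["together"],
            module="llama_toolchain.inference.adapters.together",
            config_class="llama_toolchain.inference.adapters.together.TogetherImplConfig",
        ),
    ]

The substance of the change is visible in the module and config_class values: both remote providers now resolve to the llama_toolchain.inference.adapters.* subpackage instead of llama_toolchain.inference.*.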