forked from phoenix-oss/llama-stack-mirror
Fixes to the llama stack configure
script + inference adapters
This commit is contained in:
parent
4869f2b983
commit
1380d78c19
11 changed files with 124 additions and 37 deletions
|
@@ -7,7 +7,7 @@
|
|||
from .config import FireworksImplConfig
|
||||
|
||||
|
||||
async def get_adapter_impl(config: FireworksImplConfig, _deps) -> Inference:
|
||||
async def get_adapter_impl(config: FireworksImplConfig, _deps):
|
||||
from .fireworks import FireworksInferenceAdapter
|
||||
|
||||
assert isinstance(
|
||||
|
|
|
@@ -11,7 +11,7 @@ from pydantic import BaseModel, Field
|
|||
@json_schema_type
|
||||
class FireworksImplConfig(BaseModel):
|
||||
url: str = Field(
|
||||
default="https://api.fireworks.api/inference",
|
||||
default="https://api.fireworks.ai/inference",
|
||||
description="The URL for the Fireworks server",
|
||||
)
|
||||
api_key: str = Field(
|
||||
|
|
|
@@ -7,7 +7,7 @@
|
|||
from .config import TogetherImplConfig
|
||||
|
||||
|
||||
async def get_adapter_impl(config: TogetherImplConfig, _deps) -> Inference:
|
||||
async def get_adapter_impl(config: TogetherImplConfig, _deps):
|
||||
from .together import TogetherInferenceAdapter
|
||||
|
||||
assert isinstance(
|
||||
|
|
|
@@ -42,8 +42,8 @@ def available_inference_providers() -> List[ProviderSpec]:
|
|||
pip_packages=[
|
||||
"fireworks-ai",
|
||||
],
|
||||
module="llama_toolchain.inference.fireworks",
|
||||
config_class="llama_toolchain.inference.fireworks.FireworksImplConfig",
|
||||
module="llama_toolchain.inference.adapters.fireworks",
|
||||
config_class="llama_toolchain.inference.adapters.fireworks.FireworksImplConfig",
|
||||
),
|
||||
),
|
||||
remote_provider_spec(
|
||||
|
@@ -53,8 +53,8 @@ def available_inference_providers() -> List[ProviderSpec]:
|
|||
pip_packages=[
|
||||
"together",
|
||||
],
|
||||
module="llama_toolchain.inference.together",
|
||||
config_class="llama_toolchain.inference.together.TogetherImplConfig",
|
||||
module="llama_toolchain.inference.adapters.together",
|
||||
config_class="llama_toolchain.inference.adapters.together.TogetherImplConfig",
|
||||
),
|
||||
),
|
||||
]
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue