HOTFIX: rename all ssambanova to sambanova

This commit is contained in:
seyeong-han 2024-11-29 17:26:58 -06:00
parent a82eb97bdf
commit c69dc293ae
11 changed files with 51 additions and 51 deletions

View file

@@ -1,13 +1,13 @@
services:
llamastack:
image: llamastack/distribution-ssambanova
image: llamastack/distribution-sambanova
network_mode: "host"
volumes:
- ~/.llama:/root/.llama
- ./run.yaml:/root/llamastack-run-ssambanova.yaml
- ./run.yaml:/root/llamastack-run-sambanova.yaml
ports:
- "5000:5000"
entrypoint: bash -c "python -m llama_stack.distribution.server.server --yaml_config /root/llamastack-run-ssambanova.yaml"
entrypoint: bash -c "python -m llama_stack.distribution.server.server --yaml_config /root/llamastack-run-sambanova.yaml"
deploy:
restart_policy:
condition: on-failure

View file

@@ -1,7 +1,7 @@
version: "2"
image_name: ssambanova
image_name: sambanova
docker_image: null
conda_env: ssambanova
conda_env: sambanova
apis:
- inference
- safety
@@ -13,11 +13,11 @@ apis:
- telemetry
providers:
inference:
- provider_id: ssambanova
provider_type: remote::ssambanova
- provider_id: sambanova
provider_type: remote::sambanova
config:
url: https://api.sambanova.ai/v1
api_key: ${env.SSAMBANOVA_API_KEY}
api_key: ${env.SAMBANOVA_API_KEY}
safety:
- provider_id: llama-guard
provider_type: inline::llama-guard
@@ -29,7 +29,7 @@ providers:
persistence_store:
type: sqlite
namespace: null
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ssambanova}/agents_store.db
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/sambanova}/agents_store.db
memory:
- provider_id: faiss
provider_type: inline::faiss
@@ -37,7 +37,7 @@ providers:
kvstore:
type: sqlite
namespace: null
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ssambanova}/faiss_store.db
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/sambanova}/faiss_store.db
datasetio:
- provider_id: localfs
provider_type: inline::localfs

View file

@@ -153,12 +153,12 @@ def available_providers() -> List[ProviderSpec]:
remote_provider_spec(
api=Api.inference,
adapter=AdapterSpec(
adapter_type="ssambanova",
adapter_type="sambanova",
pip_packages=[
"openai",
],
module="llama_stack.providers.remote.inference.ssambanova",
config_class="llama_stack.providers.remote.inference.ssambanova.SsambanovaImplConfig",
module="llama_stack.providers.remote.inference.sambanova",
config_class="llama_stack.providers.remote.inference.sambanova.SambanovaImplConfig",
),
),
]

View file

@@ -4,14 +4,14 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from .config import SsambanovaImplConfig
from .ssambanova import SsambanovaInferenceAdapter
from .config import SambanovaImplConfig
from .sambanova import SambanovaInferenceAdapter
async def get_adapter_impl(config: SsambanovaImplConfig, _deps):
async def get_adapter_impl(config: SambanovaImplConfig, _deps):
assert isinstance(
config, SsambanovaImplConfig
config, SambanovaImplConfig
), f"Unexpected config type: {type(config)}"
impl = SsambanovaInferenceAdapter(config)
impl = SambanovaInferenceAdapter(config)
await impl.initialize()
return impl

View file

@@ -10,12 +10,12 @@ from pydantic import BaseModel, Field
@json_schema_type
class SsambanovaImplConfig(BaseModel):
class SambanovaImplConfig(BaseModel):
url: str = Field(
default="https://api.sambanova.ai/v1",
description="The URL for the Ssambanova model serving endpoint",
description="The URL for the Sambanova model serving endpoint",
)
api_token: str = Field(
default=None,
description="The Ssambanova API token",
description="The Sambanova API token",
)

View file

@@ -30,7 +30,7 @@ from llama_stack.providers.utils.inference.prompt_adapter import (
chat_completion_request_to_prompt,
)
from .config import SsambanovaImplConfig
from .config import SambanovaImplConfig
model_aliases = [
@@ -57,8 +57,8 @@ model_aliases = [
]
class SsambanovaInferenceAdapter(ModelRegistryHelper, Inference):
def __init__(self, config: SsambanovaImplConfig) -> None:
class SambanovaInferenceAdapter(ModelRegistryHelper, Inference):
def __init__(self, config: SambanovaImplConfig) -> None:
ModelRegistryHelper.__init__(
self,
model_aliases=model_aliases,

View file

@@ -4,4 +4,4 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from .ssambanova import get_distribution_template # noqa: F401
from .sambanova import get_distribution_template # noqa: F401

View file

@@ -1,11 +1,11 @@
version: "2"
name: ssambanova
name: sambanova
distribution_spec:
description: Use Ssambanova for running LLM inference
description: Use Sambanova for running LLM inference
docker_image: null
providers:
inference:
- remote::ssambanova
- remote::sambanova
memory:
- inline::faiss
safety:

View file

@@ -1,4 +1,4 @@
# Ssambanova Distribution
# Sambanova Distribution
```{toctree}
:maxdepth: 2
@@ -37,9 +37,9 @@ The following models are available by default:
### Prerequisite: API Keys
Make sure you have access to a Ssambanova API Key. You can get one by visiting [Ssambanova](https://cloud.sambanova.ai/apis).
Make sure you have access to a Sambanova API Key. You can get one by visiting [Sambanova](https://cloud.sambanova.ai/apis).
## Running Llama Stack with Ssambanova
## Running Llama Stack with Sambanova
You can do this via Conda (build code) or Docker which has a pre-built image.
@@ -62,16 +62,16 @@ docker run \
-p $LLAMA_STACK_PORT:$LLAMA_STACK_PORT \
llamastack/distribution-{{ name }} \
--port $LLAMA_STACK_PORT \
--env SSAMBANOVA_API_KEY=$SSAMBANOVA_API_KEY \
--env SAMBANOVA_API_KEY=$SAMBANOVA_API_KEY \
--env INFERENCE_MODEL=$INFERENCE_MODEL
```
### Via Conda
```bash
llama stack build --template ssambanova --image-type conda
llama stack build --template sambanova --image-type conda
llama stack run ./run.yaml \
--port $LLAMA_STACK_PORT \
--env SSAMBANOVA_API_KEY=$SSAMBANOVA_API_KEY \
--env SAMBANOVA_API_KEY=$SAMBANOVA_API_KEY \
--env INFERENCE_MODEL=$INFERENCE_MODEL
```

View file

@@ -1,7 +1,7 @@
version: "2"
image_name: ssambanova
image_name: sambanova
docker_image: null
conda_env: ssambanova
conda_env: sambanova
apis:
- inference
- safety
@@ -13,11 +13,11 @@ apis:
- telemetry
providers:
inference:
- provider_id: ssambanova
provider_type: remote::ssambanova
- provider_id: sambanova
provider_type: remote::sambanova
config:
url: https://api.sambanova.ai/v1
api_token: ${env.SSAMBANOVA_API_KEY}
api_token: ${env.SAMBANOVA_API_KEY}
safety:
- provider_id: llama-guard
provider_type: inline::llama-guard
@@ -29,7 +29,7 @@ providers:
persistence_store:
type: sqlite
namespace: null
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ssambanova}/agents_store.db
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/sambanova}/agents_store.db
memory:
- provider_id: faiss
provider_type: inline::faiss
@@ -37,7 +37,7 @@ providers:
kvstore:
type: sqlite
namespace: null
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ssambanova}/faiss_store.db
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/sambanova}/faiss_store.db
datasetio:
- provider_id: localfs
provider_type: inline::localfs

View file

@@ -9,15 +9,15 @@ from pathlib import Path
from llama_models.sku_list import all_registered_models
from llama_stack.distribution.datatypes import ModelInput, Provider, ShieldInput
from llama_stack.providers.remote.inference.ssambanova import SsambanovaImplConfig
from llama_stack.providers.remote.inference.ssambanova.ssambanova import MODEL_ALIASES
from llama_stack.providers.remote.inference.sambanova import SambanovaImplConfig
from llama_stack.providers.remote.inference.sambanova.sambanova import MODEL_ALIASES
from llama_stack.templates.template import DistributionTemplate, RunConfigSettings
def get_distribution_template() -> DistributionTemplate:
providers = {
"inference": ["remote::ssambanova"],
"inference": ["remote::sambanova"],
"memory": ["inline::faiss", "remote::chromadb", "remote::pgvector"],
"safety": ["inline::llama-guard"],
"agents": ["inline::meta-reference"],
@@ -25,9 +25,9 @@ def get_distribution_template() -> DistributionTemplate:
}
inference_provider = Provider(
provider_id="ssambanova",
provider_type="remote::ssambanova",
config=SsambanovaImplConfig.sample_run_config(),
provider_id="sambanova",
provider_type="remote::sambanova",
config=SambanovaImplConfig.sample_run_config(),
)
core_model_to_hf_repo = {
@@ -42,9 +42,9 @@ def get_distribution_template() -> DistributionTemplate:
]
return DistributionTemplate(
name="ssambanova",
name="sambanova",
distro_type="self_hosted",
description="Use Ssambanova for running LLM inference",
description="Use Sambanova for running LLM inference",
docker_image=None,
template_path=Path(__file__).parent / "doc_template.md",
providers=providers,
@@ -63,9 +63,9 @@ def get_distribution_template() -> DistributionTemplate:
"5001",
"Port for the Llama Stack distribution server",
),
"SSAMBANOVA_API_KEY": (
"SAMBANOVA_API_KEY": (
"",
"ssambanova API Key",
"sambanova API Key",
),
},
)