diff --git a/docs/source/distributions/self_hosted_distro/cerebras.md b/docs/source/distributions/self_hosted_distro/cerebras.md
index 08b35809a..a8886d39b 100644
--- a/docs/source/distributions/self_hosted_distro/cerebras.md
+++ b/docs/source/distributions/self_hosted_distro/cerebras.md
@@ -23,7 +23,7 @@ The following environment variables can be configured:
 The following models are available by default:
 
 - `meta-llama/Llama-3.1-8B-Instruct (llama3.1-8b)`
-- `meta-llama/Llama-3.1-70B-Instruct (llama3.1-70b)`
+- `meta-llama/Llama-3.3-70B-Instruct (llama-3.3-70b)`
 
 
 ### Prerequisite: API Keys
diff --git a/llama_stack/providers/remote/inference/cerebras/cerebras.py b/llama_stack/providers/remote/inference/cerebras/cerebras.py
index 5a9fef22a..2ff213c2e 100644
--- a/llama_stack/providers/remote/inference/cerebras/cerebras.py
+++ b/llama_stack/providers/remote/inference/cerebras/cerebras.py
@@ -41,8 +41,8 @@ model_aliases = [
         CoreModelId.llama3_1_8b_instruct.value,
     ),
     build_model_alias(
-        "llama3.1-70b",
-        CoreModelId.llama3_1_70b_instruct.value,
+        "llama-3.3-70b",
+        CoreModelId.llama3_3_70b_instruct.value,
     ),
 ]
 
diff --git a/llama_stack/templates/cerebras/run.yaml b/llama_stack/templates/cerebras/run.yaml
index b7c2d316e..05b21bf0a 100644
--- a/llama_stack/templates/cerebras/run.yaml
+++ b/llama_stack/templates/cerebras/run.yaml
@@ -56,9 +56,9 @@ models:
   provider_model_id: llama3.1-8b
   model_type: llm
 - metadata: {}
-  model_id: meta-llama/Llama-3.1-70B-Instruct
+  model_id: meta-llama/Llama-3.3-70B-Instruct
   provider_id: cerebras
-  provider_model_id: llama3.1-70b
+  provider_model_id: llama-3.3-70b
   model_type: llm
 - metadata:
     embedding_dimension: 384
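
With this change, clients address the model by the new `model_id` `meta-llama/Llama-3.3-70B-Instruct`, which the Cerebras provider maps to the provider-side alias `llama-3.3-70b`. A minimal sketch of exercising the renamed model through the llama-stack-client Python SDK is shown below; the base URL, port, and exact `chat_completion` parameter names are assumptions and may differ depending on the SDK and distribution version in use.

```python
# Sketch only: assumes a running Cerebras distribution and the
# llama-stack-client Python SDK; base_url/port and parameter names
# are assumptions, not taken from this diff.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:5001")

response = client.inference.chat_completion(
    # New model_id introduced by this change; the provider resolves it
    # to the Cerebras alias "llama-3.3-70b".
    model_id="meta-llama/Llama-3.3-70B-Instruct",
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)

print(response.completion_message.content)
```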