diff --git a/llama_stack/providers/remote/inference/fireworks/fireworks.py b/llama_stack/providers/remote/inference/fireworks/fireworks.py
index d9ef57b15..975ec4893 100644
--- a/llama_stack/providers/remote/inference/fireworks/fireworks.py
+++ b/llama_stack/providers/remote/inference/fireworks/fireworks.py
@@ -65,6 +65,10 @@ MODEL_ALIASES = [
         "fireworks/llama-v3p2-90b-vision-instruct",
         CoreModelId.llama3_2_90b_vision_instruct.value,
     ),
+    build_model_alias(
+        "fireworks/llama-v3p3-70b-instruct",
+        CoreModelId.llama3_3_70b_instruct.value,
+    ),
     build_model_alias(
         "fireworks/llama-guard-3-8b",
         CoreModelId.llama_guard_3_8b.value,
diff --git a/llama_stack/templates/fireworks/run.yaml b/llama_stack/templates/fireworks/run.yaml
index cb31b4678..99f155a4a 100644
--- a/llama_stack/templates/fireworks/run.yaml
+++ b/llama_stack/templates/fireworks/run.yaml
@@ -110,6 +110,11 @@ models:
   provider_id: fireworks
   provider_model_id: fireworks/llama-v3p2-90b-vision-instruct
   model_type: llm
+- metadata: {}
+  model_id: meta-llama/Llama-3.3-70B-Instruct
+  provider_id: fireworks
+  provider_model_id: fireworks/llama-v3p3-70b-instruct
+  model_type: llm
 - metadata: {}
   model_id: meta-llama/Llama-Guard-3-8B
   provider_id: fireworks