diff --git a/llama_stack/providers/remote/inference/fireworks/fireworks.py b/llama_stack/providers/remote/inference/fireworks/fireworks.py
index e22144326..5c98d2054 100644
--- a/llama_stack/providers/remote/inference/fireworks/fireworks.py
+++ b/llama_stack/providers/remote/inference/fireworks/fireworks.py
@@ -265,7 +265,8 @@ class FireworksInferenceAdapter(
         if isinstance(request, ChatCompletionRequest):
             if media_present:
                 input_dict["messages"] = [
-                    await convert_message_to_openai_dict(m) for m in request.messages
+                    await convert_message_to_openai_dict(m, download=True)
+                    for m in request.messages
                 ]
             else:
                 input_dict["prompt"] = await chat_completion_request_to_prompt(
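
The only functional change is the new `download=True` argument, which asks the conversion helper to fetch media referenced by URL and inline it, rather than forwarding the URL to the provider. As a rough, hypothetical sketch only (not the llama_stack implementation), inlining an image into an OpenAI-style message part could look like the code below; `image_url_to_data_url` and `to_openai_image_part` are illustrative names, not functions from the codebase.

```python
# Hypothetical sketch: fetch an image and embed it as a base64 data URL so the
# remote inference provider never has to resolve the original URL itself.
import base64

import httpx


async def image_url_to_data_url(url: str) -> str:
    """Download an image and return it inlined as a base64 data URL."""
    async with httpx.AsyncClient() as client:
        response = await client.get(url)
        response.raise_for_status()
    content_type = response.headers.get("content-type", "image/png")
    encoded = base64.b64encode(response.content).decode("utf-8")
    return f"data:{content_type};base64,{encoded}"


async def to_openai_image_part(url: str, download: bool = False) -> dict:
    """Build an OpenAI-style image content part, optionally inlining the bytes."""
    resolved = await image_url_to_data_url(url) if download else url
    return {"type": "image_url", "image_url": {"url": resolved}}
```

Inlining at conversion time presumably matters when the image URL is only reachable from the client side (for example a local file server), since the Fireworks endpoint would otherwise have to fetch it.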