Fix assert message and call to completion_request_to_prompt in remote:vllm

Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
Yuan Tang 2025-01-02 18:29:21 -05:00
parent e3f187fb83
commit 3537a1401f

@@ -193,10 +193,9 @@ class VLLMInferenceAdapter(Inference, ModelsProtocolPrivate):
         else:
             assert (
                 not media_present
-            ), "Together does not support media for Completion requests"
+            ), "vLLM does not support media for Completion requests"
             input_dict["prompt"] = await completion_request_to_prompt(
                 request,
-                self.register_helper.get_llama_model(request.model),
                 self.formatter,
             )