Fix assert message and call to completion_request_to_prompt in remote:vllm
Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
commit 3537a1401f (parent e3f187fb83)
1 changed file with 1 addition and 2 deletions
```diff
@@ -193,10 +193,9 @@ class VLLMInferenceAdapter(Inference, ModelsProtocolPrivate):
         else:
             assert (
                 not media_present
-            ), "Together does not support media for Completion requests"
+            ), "vLLM does not support media for Completion requests"
             input_dict["prompt"] = await completion_request_to_prompt(
                 request,
-                self.register_helper.get_llama_model(request.model),
                 self.formatter,
             )
```