Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-29 14:44:44 +00:00)
Fix async streaming
parent 05777dfb52
commit 00affd1f02
2 changed files with 16 additions and 26 deletions
```diff
@@ -233,6 +233,7 @@ class InferenceRouter(Inference):
         messages: List[Message] | InterleavedContent,
         tool_prompt_format: Optional[ToolPromptFormat] = None,
     ) -> Optional[int]:
+        return 1
         if isinstance(messages, list):
             encoded = self.formatter.encode_dialog_prompt(messages, tool_prompt_format)
         else:
```
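One plausible reading of this hunk (an inference from the commit title, not something the diff states): the token count was computed with a synchronous, CPU-bound tokenizer call, which blocked the event loop and stalled every in-flight streaming response, so the commit short-circuits it with `return 1`. The self-contained sketch below reproduces that failure mode; every name in it is hypothetical, and only `encode_dialog_prompt` corresponds to anything in the diff.

```python
import asyncio
import time

def expensive_encode() -> int:
    """Stand-in for a CPU-bound tokenizer call such as encode_dialog_prompt."""
    time.sleep(0.5)  # simulate heavy synchronous work
    return 1

async def stream_tokens():
    """Async generator whose chunks are delayed by the blocking call."""
    for i in range(3):
        expensive_encode()  # blocks the event loop; no other task can run
        yield f"chunk-{i}"

async def heartbeat():
    """A concurrent task that visibly stalls while the loop is blocked."""
    while True:
        print("heartbeat")
        await asyncio.sleep(0.1)

async def main():
    hb = asyncio.create_task(heartbeat())
    # The heartbeat should tick every 0.1 s, but it freezes for 0.5 s
    # around each chunk because expensive_encode() holds the loop.
    async for chunk in stream_tokens():
        print(chunk)
    hb.cancel()

asyncio.run(main())
```

If that reading is right, hard-coding `return 1` trades accurate token accounting for an unblocked loop; offloading the encode with `asyncio.to_thread(...)` would be the lossless alternative.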