mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-28 02:53:30 +00:00
meta reference inference fixes (#797)
Miscellaneous fixes for meta reference inference. Tests for log probs don't pass because meta reference does not support top_k > 1.
This commit is contained in:
parent
cb41848a2a
commit
9f14382d82
5 changed files with 20 additions and 12 deletions
|
@ -227,9 +227,11 @@ async def completion_request_to_prompt_model_input_info(
|
|||
def augment_content_with_response_format_prompt(response_format, content):
    """Append a response-format instruction prompt to *content*, if one applies.

    `response_format_prompt` maps the given `response_format` to an instruction
    string (or a falsy value when no instruction is needed).  When an
    instruction exists, it is appended to the user content, normalizing the
    result to a list of content items so downstream prompt assembly sees a
    uniform shape.

    Args:
        response_format: The requested structured-output format; passed
            through to `response_format_prompt`.
        content: The existing message content — a list of content items, a
            plain string, or a single content item object.

    Returns:
        *content* unchanged when no format prompt applies; otherwise a list
        of content items with a `TextContentItem` carrying the format prompt
        appended at the end.
    """
    if fmt_prompt := response_format_prompt(response_format):
        if isinstance(content, list):
            # Already a list of items — append the prompt as one more item.
            return content + [TextContentItem(text=fmt_prompt)]
        elif isinstance(content, str):
            # Wrap the bare string so both elements are content items.
            return [TextContentItem(text=content), TextContentItem(text=fmt_prompt)]
        else:
            # NOTE(review): assumes `content` is a single content-item object
            # (e.g. an image item) — verify against callers.
            return [content, TextContentItem(text=fmt_prompt)]

    return content
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue