Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-12 13:00:39 +00:00)
always include the type field in requests
Some endpoints, e.g. NVIDIA's hosted meta/llama-3.2-11b-vision-instruct, are strict about the input schema: they require text content parts in messages to include the type field, i.e. {"text": ..., "type": "text"}. The type field needs to be specified at construction time for it to be included in the request.
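For context, this is the OpenAI-compatible content-part layout such strict endpoints expect; a minimal sketch, where the prompt text and image URL are placeholders and not taken from this commit:

# Illustrative request body for a strict OpenAI-compatible endpoint.
# Text parts that carry only {"text": ...} without "type" are rejected.
payload = {
    "model": "meta/llama-3.2-11b-vision-instruct",
    "messages": [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "What is in this image?"},
                {"type": "image_url", "image_url": {"url": "https://example.com/photo.png"}},
            ],
        }
    ],
}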
Parent: abfc4b3bce
Commit: 658465e088
1 changed file with 1 addition and 0 deletions
@@ -527,6 +527,7 @@ async def convert_message_to_openai_dict_new(message: Message | Dict) -> OpenAIC
 elif isinstance(content, TextContentItem):
     return OpenAIChatCompletionContentPartTextParam(
         text=content.text,
+        type="text",
     )
 elif isinstance(content, ImageContentItem):
     return OpenAIChatCompletionContentPartImageParam(
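Why the field has to be set at construction time: OpenAIChatCompletionContentPartTextParam is assumed here to be a TypedDict in the style of the OpenAI SDK's ChatCompletionContentPartTextParam, and a TypedDict is a plain dict at runtime, so any key omitted when the object is built simply never appears in the serialized request body. A minimal sketch of that behavior, using a hypothetical stand-in class:

from typing import Literal, TypedDict

# Hypothetical stand-in for OpenAIChatCompletionContentPartTextParam;
# the real class is assumed to be a TypedDict with the same shape.
class TextPart(TypedDict, total=False):
    text: str
    type: Literal["text"]

# Keys not passed at construction are absent from the resulting dict,
# so they never reach the wire.
print(TextPart(text="hello"))               # {'text': 'hello'}  -> rejected by strict endpoints
print(TextPart(text="hello", type="text"))  # {'text': 'hello', 'type': 'text'}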