Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-12 13:00:39 +00:00
fix openai-compat / litellm conversions
parent 4c0f122a4b
commit 1cff4c5907
1 changed file with 7 additions and 10 deletions
@@ -518,15 +518,12 @@ async def convert_message_to_openai_dict_new(message: Message | Dict) -> OpenAIC
     # {"type": "image", "image": {"url": {"uri": ...}}} -> {"type": "image_url", "image_url": {"url": ...}}
     # {"type": "image", "image": {"data": ...}} -> {"type": "image_url", "image_url": {"url": "data:image/?;base64,..."}}
     # List[...] -> List[...]
-    async def _convert_user_message_content(
+    async def _convert_message_content(
         content: InterleavedContent,
     ) -> Union[str, Iterable[OpenAIChatCompletionContentPartParam]]:
         # Llama Stack and OpenAI spec match for str and text input
         if isinstance(content, str):
-            return OpenAIChatCompletionContentPartTextParam(
-                type="text",
-                text=content,
-            )
+            return content
         elif isinstance(content, TextContentItem):
             return OpenAIChatCompletionContentPartTextParam(
                 type="text",
@@ -538,7 +535,7 @@ async def convert_message_to_openai_dict_new(message: Message | Dict) -> OpenAIC
             image_url=OpenAIImageURL(url=await convert_image_content_to_url(content)),
         )
     elif isinstance(content, List):
-        return [await _convert_user_message_content(item) for item in content]
+        return [await _convert_message_content(item) for item in content]
     else:
         raise ValueError(f"Unsupported content type: {type(content)}")

@@ -546,12 +543,12 @@ async def convert_message_to_openai_dict_new(message: Message | Dict) -> OpenAIC
     if isinstance(message, UserMessage):
         out = OpenAIChatCompletionUserMessage(
             role="user",
-            content=await _convert_user_message_content(message.content),
+            content=await _convert_message_content(message.content),
         )
     elif isinstance(message, CompletionMessage):
         out = OpenAIChatCompletionAssistantMessage(
             role="assistant",
-            content=message.content,
+            content=await _convert_message_content(message.content),
             tool_calls=[
                 OpenAIChatCompletionMessageToolCall(
                     id=tool.call_id,
@@ -568,12 +565,12 @@ async def convert_message_to_openai_dict_new(message: Message | Dict) -> OpenAIC
         out = OpenAIChatCompletionToolMessage(
             role="tool",
             tool_call_id=message.call_id,
-            content=message.content,
+            content=await _convert_message_content(message.content),
         )
     elif isinstance(message, SystemMessage):
         out = OpenAIChatCompletionSystemMessage(
             role="system",
-            content=message.content,
+            content=await _convert_message_content(message.content),
         )
     else:
         raise ValueError(f"Unsupported message type: {type(message)}")
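The net effect of the patch: every message role now runs its content through the same converter, and bare string content is passed through instead of being wrapped in a text part. Below is a minimal, self-contained sketch of that behaviour. TextItem, ImageItem, convert_message_content, and the dict payloads are hypothetical stand-ins for the Llama Stack / OpenAI types named in the diff (TextContentItem, OpenAIChatCompletionContentPartTextParam, etc.), not the real classes; only the control flow mirrors the patch.

import asyncio
from dataclasses import dataclass
from typing import Union


@dataclass
class TextItem:      # stand-in for TextContentItem
    text: str


@dataclass
class ImageItem:     # stand-in for an image content item whose URL is already resolved
    url: str


Content = Union[str, TextItem, ImageItem, list]


async def convert_message_content(content: Content) -> Union[str, dict, list]:
    # One converter for every role, like the patched _convert_message_content.
    if isinstance(content, str):
        # OpenAI chat messages accept a bare string as content, so pass it
        # through unchanged instead of wrapping it in a text part.
        return content
    elif isinstance(content, TextItem):
        return {"type": "text", "text": content.text}
    elif isinstance(content, ImageItem):
        return {"type": "image_url", "image_url": {"url": content.url}}
    elif isinstance(content, list):
        return [await convert_message_content(item) for item in content]
    else:
        raise ValueError(f"Unsupported content type: {type(content)}")


async def main() -> None:
    # Bare string content stays a bare string.
    print(await convert_message_content("hello"))
    # Assistant/tool/system content now takes the same path, so interleaved
    # text + image lists come out as OpenAI-style content-part lists.
    mixed = [TextItem(text="describe this"), ImageItem(url="data:image/png;base64,...")]
    print(await convert_message_content(mixed))


asyncio.run(main())

Routing assistant, tool, and system content through the same helper (instead of forwarding message.content raw) is presumably what the openai-compat / litellm fix addresses: non-string content no longer reaches the OpenAI-compatible client unconverted.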