Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-13 05:17:26 +00:00
small enhancement, immaterial mostly

commit 88768a93eb (parent fea9ef59b7)
2 changed files with 10 additions and 2 deletions
@@ -46,9 +46,10 @@ def pytest_generate_tests(metafunc):
             if ("Vision" in cls_name and "Vision" in model) or ("Vision" not in cls_name and "Vision" not in model):
                 params.append(pytest.param(model, id=model))
 
+        print(f"params: {params}")
         if not params:
             model = metafunc.config.getoption("--inference-model")
-            params = [pytest.param(model, id="")]
+            params = [pytest.param(model, id=model)]
 
         metafunc.parametrize(
            "inference_model",
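The id= change above only affects how the fallback parametrization is labeled: with id="" the collected test shows up with an empty bracket, while id=model surfaces the model name in the test id. A minimal, self-contained conftest.py sketch illustrating the effect (the option name comes from the diff; the default model name and test function are hypothetical, not the repository's actual test suite):

# conftest.py -- minimal sketch, not the repository's actual conftest
import pytest


def pytest_addoption(parser):
    # hypothetical default; the real suite expects the flag to be passed explicitly
    parser.addoption("--inference-model", default="Llama-3.1-8B-Instruct")


def pytest_generate_tests(metafunc):
    if "inference_model" in metafunc.fixturenames:
        model = metafunc.config.getoption("--inference-model")
        # id=model mirrors the new behavior; the old id="" produced
        # anonymous test ids like test_chat_completion[]
        metafunc.parametrize("inference_model", [pytest.param(model, id=model)])


# test_example.py
def test_chat_completion(inference_model):
    # collected as test_chat_completion[Llama-3.1-8B-Instruct]
    assert inference_model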
@@ -21,6 +21,9 @@ from openai.types.chat import (
 from openai.types.chat import (
     ChatCompletionContentPartParam as OpenAIChatCompletionContentPartParam,
 )
+from openai.types.chat import (
+    ChatCompletionContentPartTextParam as OpenAIChatCompletionContentPartTextParam,
+)
 from openai.types.chat import (
     ChatCompletionMessageParam as OpenAIChatCompletionMessage,
 )
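The newly imported ChatCompletionContentPartTextParam is a TypedDict in the openai package, so at runtime it is just a dict factory with no validation. A quick sanity check, assuming the openai package is installed:

from openai.types.chat import (
    ChatCompletionContentPartTextParam as OpenAIChatCompletionContentPartTextParam,
)

# TypedDict classes construct plain dicts; keys are not validated at runtime
part = OpenAIChatCompletionContentPartTextParam(type="text", text="hello")
print(part)  # {'type': 'text', 'text': 'hello'}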
@@ -519,8 +522,12 @@ async def convert_message_to_openai_dict_new(message: Message | Dict) -> OpenAIC
         content: InterleavedContent,
     ) -> Union[str, Iterable[OpenAIChatCompletionContentPartParam]]:
         # Llama Stack and OpenAI spec match for str and text input
-        if isinstance(content, str) or isinstance(content, TextContentItem):
+        if isinstance(content, str):
             return content
+        elif isinstance(content, TextContentItem):
+            return OpenAIChatCompletionContentPartTextParam(
+                text=content.text,
+            )
         elif isinstance(content, ImageContentItem):
             return OpenAIChatCompletionContentPartImageParam(
                 image_url=OpenAIImageURL(url=await convert_image_content_to_url(content)),
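For context, the branch added above gives TextContentItem its own conversion path instead of falling through the plain-string case. A rough, self-contained sketch of the resulting dispatch, with the Llama Stack content types stubbed out as dataclasses and the OpenAI content parts modeled as plain dicts (which is what the openai TypedDicts reduce to at runtime); this is an illustration of the shape of the logic, not the repository code:

from dataclasses import dataclass
from typing import Union


@dataclass
class TextContentItem:  # stand-in for Llama Stack's TextContentItem
    text: str


@dataclass
class ImageContentItem:  # stand-in; the real type carries richer image data
    url: str


def convert_content(content: Union[str, TextContentItem, ImageContentItem]):
    if isinstance(content, str):
        # bare strings pass through; the OpenAI spec accepts them directly
        return content
    elif isinstance(content, TextContentItem):
        # text items become an explicit "text" content part
        return {"type": "text", "text": content.text}
    elif isinstance(content, ImageContentItem):
        # image items become an "image_url" content part
        return {"type": "image_url", "image_url": {"url": content.url}}
    raise ValueError(f"unsupported content type: {type(content)}")


print(convert_content("hello"))                     # hello
print(convert_content(TextContentItem(text="hi")))  # {'type': 'text', 'text': 'hi'}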