Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-19 07:49:40 +00:00
use OpenAIMessageParam for OpenAI message compatibility
parent 172ef0c9c2
commit 279ac3bc51
2 changed files with 9 additions and 8 deletions
@@ -1267,7 +1267,12 @@ async def prepare_openai_completion_params(**params):
         elif isinstance(value, dict):
             new_value = {k: await _prepare_value(v) for k, v in value.items()}
         elif isinstance(value, BaseModel):
-            new_value = value.model_dump(exclude_none=True)
+            # Special handling for OpenAIMessageParam, preserve as Pydantic objects
+            if hasattr(value, 'role') and hasattr(value, 'content'):
+                new_value = value
+            else:
+                # Other BaseModel objects get converted to dicts
+                new_value = value.model_dump(exclude_none=True)
         return new_value

     completion_params = {}
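For reference, below is a minimal, self-contained sketch of the value-preparation behavior after this change. It is an illustration rather than the actual llama-stack module: ChatMessage and SamplingOptions are hypothetical stand-ins for OpenAIMessageParam and other BaseModel parameters, and the list-handling branch is assumed from the surrounding function rather than shown in this hunk.

# Standalone sketch of the patched _prepare_value logic: Pydantic models that
# look like OpenAI message params (having both `role` and `content`) are kept
# as Pydantic objects, while every other BaseModel is dumped to a plain dict.
# ChatMessage and SamplingOptions are hypothetical stand-ins, used only to
# make the example runnable.
import asyncio
from typing import Any, Optional

from pydantic import BaseModel


class ChatMessage(BaseModel):  # stand-in for OpenAIMessageParam
    role: str
    content: str


class SamplingOptions(BaseModel):  # an arbitrary non-message BaseModel
    temperature: Optional[float] = None
    top_p: Optional[float] = None


async def _prepare_value(value: Any) -> Any:
    new_value = value
    if isinstance(value, list):
        new_value = [await _prepare_value(v) for v in value]
    elif isinstance(value, dict):
        new_value = {k: await _prepare_value(v) for k, v in value.items()}
    elif isinstance(value, BaseModel):
        # Message-like models are preserved so downstream code can keep using
        # them as Pydantic objects; everything else becomes a dict.
        if hasattr(value, 'role') and hasattr(value, 'content'):
            new_value = value
        else:
            new_value = value.model_dump(exclude_none=True)
    return new_value


async def main() -> None:
    messages = [ChatMessage(role="user", content="hello")]
    options = SamplingOptions(temperature=0.7)

    prepared_messages = await _prepare_value(messages)
    prepared_options = await _prepare_value(options)

    assert isinstance(prepared_messages[0], ChatMessage)  # preserved as a model
    assert prepared_options == {"temperature": 0.7}       # dumped, None fields dropped


asyncio.run(main())

The duck-typed role/content check is what distinguishes message params from other models here: message objects pass through untouched, while every other BaseModel is still serialized with exclude_none=True as before the change.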