Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-03 01:03:59 +00:00
serialize binary data fix
parent 0bb28cf20a
commit 48df492a44
2 changed files with 4 additions and 6 deletions
@@ -27,6 +27,9 @@ class _URLOrData(BaseModel):
             return values
         return {"url": values}
 
+    class Config:
+        json_encoders = {bytes: lambda v: str("binary data")}
+
 
 @json_schema_type
 class ImageContentItem(_URLOrData):
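The added Config registers a Pydantic json_encoders hook so that any bytes payload is emitted as the placeholder string "binary data" instead of tripping up JSON serialization of the model. Below is a minimal sketch of the effect, assuming Pydantic v2, where the class-based Config and json_encoders still work but are deprecated; the Demo model and sample bytes are illustrative stand-ins, not code from the repository:

from pydantic import BaseModel


class Demo(BaseModel):
    # Stand-in for _URLOrData: an optional binary payload.
    data: bytes | None = None

    class Config:
        # Same encoder the commit adds: bytes fields are rendered as a
        # fixed placeholder string in JSON output.
        json_encoders = {bytes: lambda v: str("binary data")}


# Non-UTF-8 bytes (e.g. the start of a PNG header) are exactly the case
# that previously could not be serialized to JSON.
item = Demo(data=b"\x89PNG\r\n\x1a\n")
print(item.model_dump_json())  # expected: {"data":"binary data"}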
@@ -23,12 +23,7 @@ def serialize_value(value: Any) -> Any:
     elif hasattr(value, "_name_"):
         return value._name_
     elif isinstance(value, BaseModel):
-        try:
-            return value.model_dump_json()
-        except Exception as e:
-            # We cannot serialize the binary bytes data,
-            # So we return the model dump as a string for logging
-            return str(value.model_dump())
+        return value.model_dump_json()
     elif isinstance(value, (list, tuple, set)):
         return [serialize_value(item) for item in value]
     elif isinstance(value, dict):
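With that encoder in place, model_dump_json() no longer raises on binary data, so the second changed file drops the try/except fallback and serializes BaseModel values directly. A rough reconstruction of how the helper reads after this change, pieced together from the hunk context above; the branches not visible in the diff (the dict body and the final fallback) are assumptions added only to make the sketch self-contained:

from typing import Any

from pydantic import BaseModel


def serialize_value(value: Any) -> Any:
    # Sketch only: the branches above line 23 of the real file
    # (e.g. handling of plain primitives) are omitted here.
    if hasattr(value, "_name_"):
        # Enum members expose _name_; log the member name, not the object.
        return value._name_
    elif isinstance(value, BaseModel):
        # Safe now that bytes fields carry a json_encoder on the model itself.
        return value.model_dump_json()
    elif isinstance(value, (list, tuple, set)):
        return [serialize_value(item) for item in value]
    elif isinstance(value, dict):
        # Assumed body: recurse into mapping values.
        return {k: serialize_value(v) for k, v in value.items()}
    # Assumed fallback for anything else.
    return str(value)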